These are all working examples; take your time studying them. With a C or C++ background, Fortran is not hard to pick up.
I have many more, but none of them are in a file format the forum accepts for attachments (gif, jpg, jpeg, png, bmp, txt, dat, doc, ppt, m, fig).
So instead I am pasting the parallel programs I wrote in Fortran 90 directly here for you.
===============================================================
program Jacobi_1
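! Jacobi iteration on a 16x16 grid, split by columns across 4 MPI processes
! (columns 2..mysize+1 hold local data, columns 1 and mysize+2 hold neighbour data).
! Version 1: blocking MPI_SEND/MPI_RECV, ordered so that every receive finds a
! matching send; this avoids deadlock but serializes the exchange across the ranks.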
implicit none
include 'mpif.h'
integer,parameter :: totalsize=16
! size of the global array
integer,parameter :: mysize=totalsize/4,steps=10
integer :: n,myid,numprocs,i,j,rc
real :: a(totalsize,mysize+2),b(totalsize,mysize+2)
! local arrays: columns 1 and mysize+2 are ghost columns
integer :: begin_col,end_col,ierr
integer :: status(MPI_STATUS_SIZE)
call MPI_INIT(ierr)
call MPI_COMM_RANK(MPI_COMM_WORLD,myid,ierr)
call MPI_COMM_SIZE(MPI_COMM_WORLD,numprocs,ierr)
print *,"Process ",myid," of ",numprocs," is alive."
!initialize the arrays
do j=1,mysize+2
do i=1,totalsize
a(i,j)=0.0
end do
end do
if(myid==0)then
do i=1,totalsize
a(i,2)=8.0
end do
end if
if(myid==3)then
do i=1,totalsize
a(i,mysize+1)=8.0
end do
end if
do i=1,mysize+2
a(1,i)=8.0
a(totalsize,i)=8.0
end do
!Jacobi iteration
do n=1,steps
!receive data from the right neighbour
if(myid<3)then
call MPI_RECV(a(:,mysize+2),totalsize,MPI_REAL, &
myid+1,10,MPI_COMM_WORLD,status,ierr)
end if
!send data to the left neighbour
if(myid>0)then
call MPI_SEND(a(:,2),totalsize,MPI_REAL, &
myid-1,10,MPI_COMM_WORLD,ierr)
end if
!send data to the right neighbour
if(myid<3)then
call MPI_SEND(a(:,mysize+1),totalsize,MPI_REAL, &
myid+1,10,MPI_COMM_WORLD,ierr)
end if
!receive data from the left neighbour
if(myid>0)then
call MPI_RECV(a(:,1),totalsize,MPI_REAL, &
myid-1,10,MPI_COMM_WORLD,status,ierr)
end if
begin_col=2
end_col=mysize+1
if(myid==0)then
begin_col=3
end if
if(myid==3)then
end_col=mysize
end if
do j=begin_col,end_col
do i=2,totalsize-1
b(i,j)=(a(i,j+1)+a(i,j-1)+a(i+1,j)+a(i-1,j))*0.25
end do
end do
do j=begin_col,end_col
do i=2,totalsize-1
a(i,j)=b(i,j)
end do
end do
end do
do i=2,totalsize-1
print *,myid,(a(i,j),j=begin_col,end_col)
end do
call MPI_Finalize(rc)
end program Jacobi_1
==========================================================
program Jacobi_2
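! Version 2: the interior ranks (1 and 2) exchange ghost columns with MPI_SENDRECV,
! which pairs the send and receive in a single call; the edge ranks (0 and 3) still
! use separate MPI_SEND/MPI_RECV because they have only one neighbour, hence the
! if/else if/else branches below.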
implicit none
include 'mpif.h'
integer,parameter :: totalsize=16
! size of the global array
integer,parameter :: mysize=totalsize/4,steps=10
integer :: n,myid,numprocs,i,j,rc
real :: a(totalsize,mysize+2),b(totalsize,mysize+2)
! local arrays: columns 1 and mysize+2 are ghost columns
integer :: begin_col,end_col,ierr
integer :: status(MPI_STATUS_SIZE)
call MPI_INIT(ierr)
call MPI_COMM_RANK(MPI_COMM_WORLD,myid,ierr)
call MPI_COMM_SIZE(MPI_COMM_WORLD,numprocs,ierr)
print *,"Process ",myid," of ",numprocs," is alive."
!initialize the arrays
do j=1,mysize+2
do i=1,totalsize
a(i,j)=0.0
end do
end do
if(myid==0)then
do i=1,totalsize
a(i,2)=8.0
end do
end if
if(myid==3)then
do i=1,totalsize
a(i,mysize+1)=8.0
end do
end if
do i=1,mysize+2
a(1,i)=8.0
a(totalsize,i)=8.0
end do
!start the iteration
do n=1,steps
!shift data from left to right
if(myid==0)then
call MPI_SEND(a(:,mysize+1),totalsize,MPI_REAL, &
myid+1,10,MPI_COMM_WORLD,ierr)
else if(myid==3)then
call MPI_RECV(a(:,1),totalsize,MPI_REAL,myid-1, &
10,MPI_COMM_WORLD,status,ierr)
else
call MPI_SENDRECV(a(:,mysize+1),totalsize,MPI_REAL,&
myid+1,10,a(:,1),totalsize,MPI_REAL,&
myid-1,10,MPI_COMM_WORLD,status,ierr)
end if
!shift data from right to left
if(myid==0)then
call MPI_RECV(a(:,mysize+2),totalsize,MPI_REAL, &
myid+1,10,MPI_COMM_WORLD,status,ierr)
else if(myid==3)then
call MPI_SEND(a(:,2),totalsize,MPI_REAL,myid-1, &
10,MPI_COMM_WORLD,ierr)
else
call MPI_SENDRECV(a(:,2),totalsize,MPI_REAL,myid-1,&
10,a(:,mysize+2),totalsize,MPI_REAL, &
myid+1,10,MPI_COMM_WORLD,status,ierr)
end if
begin_col=2
end_col=mysize+1
if(myid==0)then
begin_col=3
end if
if(myid==3)then
end_col=mysize
end if
do j=begin_col,end_col
do i=2,totalsize-1
b(i,j)=(a(i,j+1)+a(i,j-1)+a(i+1,j)+a(i-1,j))*0.25
end do
end do
do j=begin_col,end_col
do i=2,totalsize-1
a(i,j)=b(i,j)
end do
end do
end do
do i=2,totalsize-1
print *,myid,(a(i,j),j=begin_col,end_col)
end do
call MPI_Finalize(rc)
end program Jacobi_2
=============================================================
program Jacobi_3
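! Version 3: every rank calls MPI_SENDRECV unconditionally; ranks 0 and 3 pass
! MPI_PROC_NULL for the missing neighbour, which turns that half of the exchange
! into a no-op and removes the rank-specific branches of versions 1 and 2.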
implicit none
include 'mpif.h'
integer,parameter :: totalsize=16
! size of the global array
integer,parameter :: mysize=totalsize/4,steps=10
integer :: n,myid,numprocs,i,j,rc
real :: a(totalsize,mysize+2),b(totalsize,mysize+2)
! local arrays: columns 1 and mysize+2 are ghost columns
integer :: begin_col,end_col,ierr
integer :: left,right,tag1,tag2
integer :: status(MPI_STATUS_SIZE)
call MPI_INIT(ierr)
call MPI_COMM_RANK(MPI_COMM_WORLD,myid,ierr)
call MPI_COMM_SIZE(MPI_COMM_WORLD,numprocs,ierr)
print *,"Process ",myid," of ",numprocs," is alive."
!initialize the arrays
do j=1,mysize+2
do i=1,totalsize
a(i,j)=0.0
end do
end do
if(myid==0)then
do i=1,totalsize
a(i,2)=8.0
end do
end if
if(myid==3)then
do i=1,totalsize
a(i,mysize+1)=8.0
end do
end if
do i=1,mysize+2
a(1,i)=8.0
a(totalsize,i)=8.0
end do
tag1=3
tag2=4
!set the ranks of the left and right neighbours of the current process
if(myid>0)then
left=myid-1
else
left=MPI_PROC_NULL
end if
if(myid<3)then
right=myid+1
else
right=MPI_PROC_NULL
end if
!Jacobi iteration
do n=1,steps
!shift data from left to right
call MPI_SENDRECV(a(:,mysize+1),totalsize,MPI_REAL, &
right,tag1,a(:,1),totalsize,MPI_REAL,&
left,tag1,MPI_COMM_WORLD,status,ierr)
!shift data from right to left
call MPI_SENDRECV(a(:,2),totalsize,MPI_REAL,left, &
tag2,a(:,mysize+2),totalsize,MPI_REAL, &
right,tag2,MPI_COMM_WORLD,status,ierr)
begin_col=2
end_col=mysize+1
if(myid==0)then
begin_col=3
end if
if(myid==3)then
end_col=mysize
end if
do j=begin_col,end_col
do i=2,totalsize-1
b(i,j)=(a(i,j+1)+a(i,j-1)+a(i+1,j)+a(i-1,j))*0.25
end do
end do
do j=begin_col,end_col
do i=2,totalsize-1
a(i,j)=b(i,j)
end do
end do
end do
do i=2,totalsize-1
print *,myid,(a(i,j),j=begin_col,end_col)
end do
call MPI_Finalize(rc)
end program Jacobi_3
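=============================================================
For comparison, the same ghost-column exchange can also be written with non-blocking communication, so that all four transfers are in flight at once instead of completing one after another. The sketch below is mine, not part of the original set of programs: the name Jacobi_4, the request array req, and the message tags are assumptions, and it keeps the same fixed 4-process, 16x16 decomposition as the three versions above.

program Jacobi_4
implicit none
include 'mpif.h'
integer,parameter :: totalsize=16
integer,parameter :: mysize=totalsize/4,steps=10
integer :: n,myid,numprocs,i,j,rc,ierr
real :: a(totalsize,mysize+2),b(totalsize,mysize+2)
integer :: begin_col,end_col,left,right
integer :: req(4),status_array(MPI_STATUS_SIZE,4)
call MPI_INIT(ierr)
call MPI_COMM_RANK(MPI_COMM_WORLD,myid,ierr)
call MPI_COMM_SIZE(MPI_COMM_WORLD,numprocs,ierr)
print *,"Process ",myid," of ",numprocs," is alive."
! same initialization as the three versions above
a=0.0
if(myid==0) a(:,2)=8.0
if(myid==3) a(:,mysize+1)=8.0
a(1,:)=8.0
a(totalsize,:)=8.0
! neighbour ranks; MPI_PROC_NULL makes the edge transfers no-ops
left=MPI_PROC_NULL
right=MPI_PROC_NULL
if(myid>0) left=myid-1
if(myid<3) right=myid+1
do n=1,steps
  ! post all four ghost-column transfers, then wait for them together;
  ! the first element of each column is passed so the library sees the
  ! contiguous column directly (the usual style with the implicit mpif.h interface)
  call MPI_IRECV(a(1,1),totalsize,MPI_REAL,left,10,MPI_COMM_WORLD,req(1),ierr)
  call MPI_IRECV(a(1,mysize+2),totalsize,MPI_REAL,right,20,MPI_COMM_WORLD,req(2),ierr)
  call MPI_ISEND(a(1,mysize+1),totalsize,MPI_REAL,right,10,MPI_COMM_WORLD,req(3),ierr)
  call MPI_ISEND(a(1,2),totalsize,MPI_REAL,left,20,MPI_COMM_WORLD,req(4),ierr)
  call MPI_WAITALL(4,req,status_array,ierr)
  begin_col=2
  end_col=mysize+1
  if(myid==0) begin_col=3
  if(myid==3) end_col=mysize
  do j=begin_col,end_col
    do i=2,totalsize-1
      b(i,j)=(a(i,j+1)+a(i,j-1)+a(i+1,j)+a(i-1,j))*0.25
    end do
  end do
  a(2:totalsize-1,begin_col:end_col)=b(2:totalsize-1,begin_col:end_col)
end do
do i=2,totalsize-1
  print *,myid,(a(i,j),j=begin_col,end_col)
end do
call MPI_Finalize(rc)
end program Jacobi_4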