# mpiexecjl -n 2 julia dcopy.jl
# This code tries to copy an El.DistMatrix to another El.DistMatrix in parallel.
# The intention is for each rank to deal with just its portion of the data.
# My actual goal is to create an El.DistMatrix with one less column.
# Please show me how to do the copy with explicit loops, not just an assignment.
# The code works when a single rank is used.
using MPIClusterManagers
manager = MPIClusterManagers.start_main_loop(MPI_TRANSPORT_ALL)
@mpi_do manager begin
using Elemental
const El = Elemental
end
@mpi_do manager begin
using MPI
comm = MPI.COMM_WORLD
myrank = MPI.Comm_rank(comm)
num_ranks = MPI.Comm_size(comm)
MPI.Barrier(comm)
end
@mpi_do manager begin
height = 4
width = 6
dfrom = El.DistMatrix(Float32, El.STAR, El.VC)
El.zeros!(dfrom, height, width)
# Every rank queues the same update for every entry, and queued updates are
# summed by processQueues, so each contribution is scaled by 1/num_ranks.
mul = 1.0/num_ranks
# Fill dfrom so that each entry encodes its own coordinates: column*1000 + row.
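# For example, with num_ranks = 2 both ranks queue 0.5f0*(i*1000 + j) for
# entry (j, i); after processQueues the two contributions sum to i*1000 + j.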
for i = 1:width
for j = 1:height
El.queueUpdate(dfrom, j, i, Float32(mul * (i*1000+j)))
end
end
El.processQueues(dfrom)
MPI.Barrier(comm)
dto = El.DistMatrix(Float32, El.STAR, El.VC)
# dto is the copy
El.zeros!(dto, height, width)
lh = El.localHeight(dfrom)
if lh != height
println("$myrank lh != height ")
return
end
lw = El.localWidth(dto)
# With the [STAR, VC] distribution each column lives entirely on one rank,
# and every rank stores all rows of the columns it owns.
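# For example, with num_ranks = 2 and width = 6, VC should deal the global
# columns out round-robin: rank 0 owns columns 1, 3, 5 and rank 1 owns
# columns 2, 4, 6 (El.globalCol reports the mapping actually in use).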
for i = 1:lw
colto = El.globalCol(dto, i)
#println("$myrank $colto ")
for j = 1:height
#println("$myrank j= $j, colto= $colto\n")
El.queueUpdate(dto, j, colto, dfrom[j, colto])
end # j
end # i
El.processQueues(dto)
MPI.Barrier(comm)
# Gather full local copies of dto and dfrom for printing.
d = collect(dto)
dfromc = collect(dfrom)
#println("$myrank dfrom= $dfrom")
#println("$myrank dto= $dto")
if myrank == 0
println("------\n ")
println("* Lines with expected value.")
for i = 1:height
s = " "
sf = "*"
for j = 1:width
sf = sf * "$(dfromc[i, j]), "
s = s * "$(d[i, j]), "
end
println(sf, " ")
println(s," ")
end
end
end
@mpi_do manager Elemental.Finalize()
MPIClusterManagers.stop_main_loop(manager)
println("main loop stopped")