From 1623c1ff03c68c4738f7b96cdfd4f741495cbcee Mon Sep 17 00:00:00 2001 From: amartin Date: Mon, 25 Oct 2021 18:02:39 +1100 Subject: [PATCH 01/14] Adding more versions to MPI.jl [compat] section --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 6c11929..8b2ad2b 100644 --- a/Project.toml +++ b/Project.toml @@ -15,7 +15,7 @@ SparseMatricesCSR = "a0a7dd2c-ebf4-11e9-1f05-cf50bc540ca1" [compat] Gridap = "0.17" -MPI = "0.14, 0.15, 0.16" +MPI = "0.14, 0.15, 0.16, 0.17, 0.18, 0.19" PETSc_jll = "3.13" PartitionedArrays = "0.2.4" SparseMatricesCSR = "0.6.1" From a1b0377d7f5f785f5f7350fc2018c88e9275efea Mon Sep 17 00:00:00 2001 From: amartin Date: Mon, 25 Oct 2021 23:29:23 +1100 Subject: [PATCH 02/14] Adding SNES support (WIP) --- src/GridapPETSc.jl | 5 +- src/PETSC.jl | 59 ++++++++++-- src/PETScArrays.jl | 14 ++- src/PETScLinearSolvers.jl | 4 +- src/PETScNonlinearSolvers.jl | 139 +++++++++++++++++++++++++++++ test/PETScNonlinearSolversTests.jl | 25 ++++++ 6 files changed, 235 insertions(+), 11 deletions(-) create mode 100644 src/PETScNonlinearSolvers.jl create mode 100644 test/PETScNonlinearSolversTests.jl diff --git a/src/GridapPETSc.jl b/src/GridapPETSc.jl index 448fae0..dbdd563 100644 --- a/src/GridapPETSc.jl +++ b/src/GridapPETSc.jl @@ -66,7 +66,7 @@ end include("PETSC.jl") using GridapPETSc.PETSC: @check_error_code -using GridapPETSc.PETSC: PetscBool, PetscInt, PetscScalar, Vec, Mat, KSP, PC +using GridapPETSc.PETSC: PetscBool, PetscInt, PetscScalar, Vec, Mat, KSP, PC, SNES #export PETSC export @check_error_code export PetscBool, PetscInt, PetscScalar, Vec, Mat, KSP, PC @@ -82,6 +82,9 @@ include("PartitionedArrays.jl") export PETScLinearSolver include("PETScLinearSolvers.jl") +export PETScNonlinearSolver +include("PETScNonlinearSolvers.jl") + include("PETScAssembly.jl") end # module diff --git a/src/PETSC.jl b/src/PETSC.jl index e3ddb7c..5e2a258 100644 --- a/src/PETSC.jl +++ b/src/PETSC.jl @@ -26,14 +26,14 @@ let types_jl = joinpath(@__DIR__,"..","deps","PetscDataTypes.jl") if !isfile(types_jl) msg = """ GridapPETSc needs to be configured before use. Type - + pkg> build - + and try again. """ error(msg) end - + include(types_jl) end @@ -98,7 +98,7 @@ macro PETSC_VIEWER_STDOUT_SELF() quote PETSC_VIEWER_STDOUT_(MPI.COMM_SELF) end -end +end """ @PETSC_VIEWER_STDOUT_WORLD @@ -109,7 +109,7 @@ macro PETSC_VIEWER_STDOUT_WORLD() quote PETSC_VIEWER_STDOUT_(MPI.COMM_WORLD) end -end +end """ @PETSC_VIEWER_DRAW_SELF @@ -120,7 +120,7 @@ macro PETSC_VIEWER_DRAW_SELF() quote PETSC_VIEWER_DRAW_(MPI.COMM_SELF) end -end +end """ @PETSC_VIEWER_DRAW_WORLD @@ -131,7 +131,7 @@ macro PETSC_VIEWER_DRAW_WORLD() quote PETSC_VIEWER_DRAW_(MPI.COMM_WORLD) end -end +end # Vector related functions @@ -607,4 +607,49 @@ Base.convert(::Type{PC},p::Ptr{Cvoid}) = PC(p) @wrapper(:KSPGetPC,PetscErrorCode,(KSP,Ptr{PC}),(ksp,pc),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/KSP/KSPGetPC.html") @wrapper(:PCSetType,PetscErrorCode,(PC,PCType),(pc,typ),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/PC/PCSetType.html") + +""" +Julia alias for the `SNES` C type. + +See [PETSc manual](https://petsc.org/release/docs/manualpages/SNES/SNES.html). 
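[Review note: the docstring could also state the object lifecycle explicitly: SNES
objects are created with SNESCreate, configured through the SNESSet* wrappers
added below, and released with SNESDestroy.]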
+""" +struct SNES + ptr::Ptr{Cvoid} +end +SNES() = SNES(Ptr{Cvoid}()) +Base.convert(::Type{SNES},p::Ptr{Cvoid}) = SNES(p) + +const SNESType = Cstring +const SNESNEWTONLS = "newtonls" +const SNESNEWTONTR = "newtontr" +const SNESPYTHON = "python" +const SNESNRICHARDSON = "nrichardson" +const SNESKSPONLY = "ksponly" +const SNESKSPTRANSPOSEONLY = "ksptransposeonly" +const SNESVINEWTONRSLS = "vinewtonrsls" +const SNESVINEWTONSSLS = "vinewtonssls" +const SNESNGMRES = "ngmres" +const SNESQN = "qn" +const SNESSHELL = "shell" +const SNESNGS = "ngs" +const SNESNCG = "ncg" +const SNESFAS = "fas" +const SNESMS = "ms" +const SNESNASM = "nasm" +const SNESANDERSON = "anderson" +const SNESASPIN = "aspin" +const SNESCOMPOSITE = "composite" +const SNESPATCH = "patch" + + +@wrapper(:SNESCreate,PetscErrorCode,(MPI.Comm,Ptr{SNES}),(comm,snes),"https://petsc.org/release/docs/manualpages/SNES/SNESCreate.html") +@wrapper(:SNESSetFunction,PetscErrorCode,(SNES,Vec,Ptr{Cvoid},Ptr{Cvoid}),(snes,vec,fptr,ctx),"https://petsc.org/release/docs/manualpages/SNES/SNESSetFunction.html") +@wrapper(:SNESSetJacobian,PetscErrorCode,(SNES,Mat,Mat,Ptr{Cvoid},Ptr{Cvoid}),(snes,A,P,jacptr,ctx),"https://petsc.org/release/docs/manualpages/SNES/SNESSetJacobian.html") +@wrapper(:SNESSolve,PetscErrorCode,(SNES,Vec,Vec),(snes,b,x),"https://petsc.org/release/docs/manualpages/SNES/SNESSolve.html") +@wrapper(:SNESDestroy,PetscErrorCode,(Ptr{SNES},),(snes,),"https://petsc.org/release/docs/manualpages/SNES/SNESDestroy.html") +@wrapper(:SNESSetFromOptions,PetscErrorCode,(SNES,),(snes,),"https://petsc.org/release/docs/manualpages/SNES/SNESSetFromOptions.html") +@wrapper(:SNESView,PetscErrorCode,(SNES,PetscViewer),(snes,viewer),"https://petsc.org/release/docs/manualpages/SNES/SNESView.html") +@wrapper(:SNESSetType,PetscErrorCode,(SNES,SNESType),(snes,type),"https://petsc.org/release/docs/ +manualpages/SNES/SNESSetType.html") + end # module diff --git a/src/PETScArrays.jl b/src/PETScArrays.jl index 84712ff..929d518 100644 --- a/src/PETScArrays.jl +++ b/src/PETScArrays.jl @@ -194,6 +194,19 @@ function PETScMatrix(csr::SparseMatrixCSR{0,PetscScalar,PetscInt}) Init(A) end +function PETScMatrix(a::AbstractMatrix{PetscScalar}) + m, n = size(a) + i = [PetscInt(n*(i-1)) for i=1:m+1] + j = [PetscInt(j-1) for i=1:m for j=1:n] + v = [ a[i,j] for i=1:m for j=1:n] + A = PETScMatrix() + A.ownership = m + @check_error_code PETSC.MatCreateSeqAIJWithArrays(MPI.COMM_SELF,m,n,i,j,v,A.mat) + @check_error_code PETSC.MatAssemblyBegin(A.mat[],PETSC.MAT_FINAL_ASSEMBLY) + @check_error_code PETSC.MatAssemblyEnd(A.mat[],PETSC.MAT_FINAL_ASSEMBLY) + Init(A) +end + function Base.similar(::PETScMatrix,::Type{PetscScalar},ax::Tuple{Int,Int}) PETScMatrix(ax[1],ax[2]) end @@ -313,4 +326,3 @@ function LinearAlgebra.norm(a::PETScVector, p::Real=2) @check_error_code PETSC.VecNorm(a.vec[],nt,val) Float64(val[]) end - diff --git a/src/PETScLinearSolvers.jl b/src/PETScLinearSolvers.jl index 9db7a48..75d6ba2 100644 --- a/src/PETScLinearSolvers.jl +++ b/src/PETScLinearSolvers.jl @@ -4,10 +4,10 @@ struct PETScLinearSolver{F} <: LinearSolver comm::MPI.Comm end -from_options(ksp) = @check_error_code PETSC.KSPSetFromOptions(ksp[]) +ksp_from_options(ksp) = @check_error_code PETSC.KSPSetFromOptions(ksp[]) function PETScLinearSolver(comm::MPI.Comm) - PETScLinearSolver(from_options,comm) + PETScLinearSolver(ksp_from_options,comm) end PETScLinearSolver() = PETScLinearSolver(MPI.COMM_WORLD) diff --git a/src/PETScNonlinearSolvers.jl b/src/PETScNonlinearSolvers.jl new file mode 100644 index 
0000000..a3f3bd2
--- /dev/null
+++ b/src/PETScNonlinearSolvers.jl
@@ -0,0 +1,139 @@
+
+mutable struct PETScNonlinearSolver <: NonlinearSolver
+  comm::MPI.Comm
+  snes::Ref{SNES}
+
+  op
+
+  # Julia LA data structures
+  x_julia_vec
+  res_julia_vec
+  jac_julia_mat_A
+  jac_julia_mat_P
+
+  # Counterpart PETSc data structures
+  x_petsc_vec
+  res_petsc_vec
+  jac_petsc_mat_A
+  jac_petsc_mat_P
+end
+
+function snes_residual(csnes::Ptr{Cvoid},
+                       cx::Ptr{Cvoid},
+                       cfx::Ptr{Cvoid},
+                       ctx::Ptr{Cvoid})::PetscInt
+  nls = unsafe_pointer_to_objref(ctx)
+  x  = Vec(cx)
+  fx = Vec(cfx)
+
+  # 1. Transfer cx to Julia data structures
+  #    Extract pointer to array of values out of cx and put it in a PVector
+  # xxx
+
+  # 2. Evaluate residual into Julia data structures
+  residual!(nls.res_julia_vec, nls.op, nls.x_julia_vec)
+
+  # 3. Transfer Julia residual to PETSc residual (cfx)
+  #
+  # xxx
+
+  return PetscInt(0)
+end
+
+function snes_jacobian(csnes:: Ptr{Cvoid},
+                       cx  :: Ptr{Cvoid},
+                       cA  :: Ptr{Cvoid},
+                       cP  :: Ptr{Cvoid},
+                       ctx :: Ptr{Cvoid})::PetscInt
+
+  nls = unsafe_pointer_to_objref(ctx)
+
+  # 1. Transfer cx to Julia data structures
+  #    Extract pointer to array of values out of cx and put it in a PVector
+  # xxx
+
+  # 2.
+  jacobian!(nls.jac_julia_mat_A,nls.op,nls.x_julia_vec)
+
+  # 3. Transfer nls.jac_julia_mat_A/P to PETSc (cA/cP)
+  # xxx
+
+  return PetscInt(0)
+end
+
+function PETScNonlinearSolver(setup,comm)
+  @assert Threads.threadid() == 1
+  _NREFS[] += 1
+  snes_ref=Ref{SNES}()
+  @check_error_code PETSC.SNESCreate(comm,snes_ref)
+  setup(snes_ref)
+  snes=PETScNonlinearSolver(comm, snes_ref,
+                            nothing,
+                            nothing,nothing,nothing,nothing,
+                            nothing,nothing,nothing,nothing)
+  finalizer(Finalize, snes)
+  snes
+end
+
+function Finalize(nls::PETScNonlinearSolver)
+  if GridapPETSc.Initialized()
+    @check_error_code PETSC.SNESDestroy(nls.snes)
+    @assert Threads.threadid() == 1
+    _NREFS[] -= 1
+  end
+  nothing
+end
+snes_from_options(snes) = @check_error_code PETSC.SNESSetFromOptions(snes[])
+
+function PETScNonlinearSolver(comm::MPI.Comm)
+  PETScNonlinearSolver(snes_from_options,comm)
+end
+
+PETScNonlinearSolver(setup::Function) = PETScNonlinearSolver(setup,MPI.COMM_WORLD)
+PETScNonlinearSolver() = PETScNonlinearSolver(MPI.COMM_WORLD)
+
+function _set_petsc_residual_function!(nls::PETScNonlinearSolver)
+  ctx = pointer_from_objref(nls)
+  fptr = @cfunction(snes_residual, PetscInt, (Ptr{Cvoid}, Ptr{Cvoid}, Ptr{Cvoid}, Ptr{Cvoid}))
+  PETSC.SNESSetFunction(nls.snes[],nls.res_petsc_vec.vec[],fptr,ctx)
+end
+
+function _set_petsc_jacobian_function!(nls::PETScNonlinearSolver)
+  ctx = pointer_from_objref(nls)
+  fptr = @cfunction(snes_jacobian, PetscInt, (Ptr{Cvoid}, Ptr{Cvoid}, Ptr{Cvoid}, Ptr{Cvoid},Ptr{Cvoid}))
+  PETSC.SNESSetJacobian(nls.snes[],nls.jac_petsc_mat_A.mat[],nls.jac_petsc_mat_A.mat[],fptr,ctx)
+end
+
+function _setup_before_solve!(x,nls::PETScNonlinearSolver,op)
+  # TO-DO: allocate_residual_and_jacobian
+  res_julia_vec, jac_julia_mat_A = residual_and_jacobian(op,x)
+
+
+  # Does this perform a copy?
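[Review note on the question above: if PETScVector(::Vector{PetscScalar}) is the
VecCreateSeqWithArray-based constructor from PETScArrays.jl, the answer is no
for the sequential case: the Vec wraps the Julia array in place, so the two
alias the same storage. Whether the parallel constructors copy depends on the
convert call they perform on the local values, so this deserves an explicit test.]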
+ res_petsc_vec = PETScVector(res_julia_vec) + x_petsc_vec = PETScVector(x) + jac_petsc_mat_A = PETScMatrix(jac_julia_mat_A) + println(jac_julia_mat_A) + PETSC.@check_error_code PETSC.MatView(jac_petsc_mat_A.mat[],PETSC.@PETSC_VIEWER_STDOUT_WORLD) + + nls.op = op + + nls.res_julia_vec = res_julia_vec + nls.x_julia_vec = copy(res_julia_vec) + nls.x_petsc_vec = x_petsc_vec + nls.jac_julia_mat_A = jac_julia_mat_A + nls.jac_julia_mat_P = jac_julia_mat_A + + nls.res_petsc_vec = res_petsc_vec + nls.jac_petsc_mat_A = jac_petsc_mat_A + nls.jac_petsc_mat_P = jac_petsc_mat_A + + _set_petsc_residual_function!(nls) + _set_petsc_jacobian_function!(nls) +end + +function Algebra.solve!(x::AbstractVector,nls::PETScNonlinearSolver,op::NonlinearOperator) + _setup_before_solve!(x,nls,op) + @check_error_code PETSC.SNESSolve(nls.snes[],C_NULL,nls.x_petsc_vec.vec[]) + nothing +end diff --git a/test/PETScNonlinearSolversTests.jl b/test/PETScNonlinearSolversTests.jl new file mode 100644 index 0000000..e4da550 --- /dev/null +++ b/test/PETScNonlinearSolversTests.jl @@ -0,0 +1,25 @@ +module PETScNonlinearSolversTests + +using Gridap +using GridapPETSc +using Test + +options = "-snes_type newtonls -snes_monitor" +GridapPETSc.Init(args=split(options)) + +op = Gridap.Algebra.NonlinearOperatorMock() +nls = PETScNonlinearSolver() + +x0 = zero_initial_guess(op) +x = [1.0, 3.0] +Gridap.Algebra.test_nonlinear_solver(nls,op,x0,x) + +x0 = [2.1,2.9] +x = [2.0, 3.0] +Gridap.Algebra.test_nonlinear_solver(nls,op,x0,x) + +x0 = zero_initial_guess(op) +cache = solve!(x0,nls,op) + +GridapPETSc.Finalize(nls) +end # module From 820c4096c338d205fce27ae440f72b20bff86513 Mon Sep 17 00:00:00 2001 From: amartin Date: Tue, 26 Oct 2021 22:53:47 +1100 Subject: [PATCH 03/14] SNES PETSc solver working in a serial context --- src/GridapPETSc.jl | 2 + src/PETSC.jl | 11 ++ src/PETScArrays.jl | 35 +++- src/PETScNonlinearSolvers.jl | 247 ++++++++++++++++++++++------- test/PETScArraysTests.jl | 10 +- test/PETScNonlinearSolversTests.jl | 5 +- test/PartitionedArraysTests.jl | 17 ++ test/runtests.jl | 2 + 8 files changed, 267 insertions(+), 62 deletions(-) diff --git a/src/GridapPETSc.jl b/src/GridapPETSc.jl index dbdd563..ed5735b 100644 --- a/src/GridapPETSc.jl +++ b/src/GridapPETSc.jl @@ -74,6 +74,8 @@ export PetscBool, PetscInt, PetscScalar, Vec, Mat, KSP, PC include("Environment.jl") export PETScVector +export get_local_oh_vector +export get_local_vector, restore_local_vector! 
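[Review note: the newly exported helpers form a get/restore discipline around
PETSc's VecGhostGetLocalForm/VecGetArray (wrapped later in this patch), so any
access to the raw entries should be bracketed by the matching restore call. A
minimal usage sketch, assuming x is a PETScVector in an initialized session:

    lg = get_local_oh_vector(x)        # owned+ghost local form, or a plain Array
    if isa(lg,PETScVector)             # x is a ghosted (MPI) vector
      lx = get_local_vector(lg)        # zero-copy view of the local entries
      # ... read or modify lx ...
      restore_local_vector!(lx,lg)
      GridapPETSc.Finalize(lg)
    else                               # x is not ghosted: lg is already the view
      # ... read or modify lg ...
      restore_local_vector!(lg,x)
    end
]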
export PETScMatrix export petsc_sparse include("PETScArrays.jl") diff --git a/src/PETSC.jl b/src/PETSC.jl index 5e2a258..3677001 100644 --- a/src/PETSC.jl +++ b/src/PETSC.jl @@ -197,7 +197,15 @@ Base.convert(::Type{Vec},p::Ptr{Cvoid}) = Vec(p) @wrapper(:VecSetValues,PetscErrorCode,(Vec,PetscInt,Ptr{PetscInt},Ptr{PetscScalar},InsertMode),(x,ni,ix,y,iora),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Vec/VecSetValues.html") @wrapper(:VecGetValues,PetscErrorCode,(Vec,PetscInt,Ptr{PetscInt},Ptr{PetscScalar}),(x,ni,ix,y),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Vec/VecGetValues.html") @wrapper(:VecGetArray,PetscErrorCode,(Vec,Ptr{Ptr{PetscScalar}}),(x,a),"https://petsc.org/release/docs/manualpages/Vec/VecGetArray.html") +@wrapper(:VecGetArrayRead,PetscErrorCode,(Vec,Ptr{Ptr{PetscScalar}}),(x,a),"https://petsc.org/ +release/docs/manualpages/Vec/VecGetArrayRead.html") +@wrapper(:VecGetArrayWrite,PetscErrorCode,(Vec,Ptr{Ptr{PetscScalar}}),(x,a),"https://petsc.org/release/docs/manualpages/Vec/VecGetArrayWrite.html") +@wrapper(:VecRestoreArray,PetscErrorCode,(Vec,Ptr{Ptr{PetscScalar}}),(x,a),"https://petsc.org/release/docs/manualpages/Vec/VecRestoreArray.html") +@wrapper(:VecRestoreArrayRead,PetscErrorCode,(Vec,Ptr{Ptr{PetscScalar}}),(x,a),"https://petsc.org/ +release/docs/manualpages/Vec/VecRestoreArrayRead.html") +@wrapper(:VecRestoreArrayWrite,PetscErrorCode,(Vec,Ptr{Ptr{PetscScalar}}),(x,a),"https://petsc.org/release/docs/manualpages/Vec/VecRestoreArrayWrite.html") @wrapper(:VecGetSize,PetscErrorCode,(Vec,Ptr{PetscInt}),(vec,n),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Vec/VecGetSize.html") +@wrapper(:VecGetLocalSize,PetscErrorCode,(Vec,Ptr{PetscInt}),(vec,n),"https://petsc.org/release/docs/manualpages/Vec/VecGetLocalSize.html") @wrapper(:VecAssemblyBegin,PetscErrorCode,(Vec,),(vec,),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Vec/VecAssemblyBegin.html") @wrapper(:VecAssemblyEnd,PetscErrorCode,(Vec,),(vec,),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Vec/VecAssemblyEnd.html") @wrapper(:VecPlaceArray,PetscErrorCode,(Vec,Ptr{PetscScalar}),(vec,array),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Vec/VecPlaceArray.html") @@ -211,6 +219,9 @@ Base.convert(::Type{Vec},p::Ptr{Cvoid}) = Vec(p) @wrapper(:VecAXPBY,PetscErrorCode,(Vec,PetscScalar,PetscScalar,Vec),(y,alpha,beta,x),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Vec/VecAXPBY.html") @wrapper(:VecSetOption,PetscErrorCode,(Vec,VecOption,PetscBool),(x,op,flg),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Vec/VecSetOption.html") @wrapper(:VecNorm,PetscErrorCode,(Vec,NormType,Ptr{PetscReal}),(x,typ,val),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Vec/VecNorm.html") +@wrapper(:VecGhostGetLocalForm,PetscErrorCode,(Vec,Ptr{Vec}),(g,l),"https://petsc.org/release/docs/ +manualpages/Vec/VecGhostGetLocalForm.html") +@wrapper(:VecGhostRestoreLocalForm,PetscErrorCode,(Vec,Ptr{Vec}),(g,l),"https://petsc.org/release/docs/manualpages/Vec/VecGhostRestoreLocalForm.html") # Matrix related functions diff --git a/src/PETScArrays.jl b/src/PETScArrays.jl index 929d518..8fbc172 100644 --- a/src/PETScArrays.jl +++ b/src/PETScArrays.jl @@ -106,6 +106,39 @@ function Base.convert(::Type{PETScVector},a::AbstractVector) PETScVector(array) end +function get_local_oh_vector(a::PETScVector) + v=PETScVector() + @check_error_code PETSC.VecGhostGetLocalForm(a.vec[],v.vec) + if v.vec[] != C_NULL # a is a ghosted vector + 
v.ownership=a + Init(v) + return v + else # a is NOT a ghosted vector + return get_local_vector(a) + end +end + +function _local_size(a::PETScVector) + r_sz = Ref{PetscInt}() + @check_error_code PETSC.VecGetLocalSize(a.vec[], r_sz) + r_sz[] +end + +# This function works with either ghosted or non-ghosted MPI vectors. +# In the case of a ghosted vector it solely returns the locally owned +# entries. +function get_local_vector(a::PETScVector) + r_pv = Ref{Ptr{PetscScalar}}() + @check_error_code PETSC.VecGetArray(a.vec[], r_pv) + v = unsafe_wrap(Array, r_pv[], _local_size(a); own = false) + return v +end + +function restore_local_vector!(v::Array,a::PETScVector) + @check_error_code PETSC.VecRestoreArray(a.vec[], Ref(pointer(v))) + nothing +end + # Matrix mutable struct PETScMatrix <: AbstractMatrix{PetscScalar} @@ -200,7 +233,7 @@ function PETScMatrix(a::AbstractMatrix{PetscScalar}) j = [PetscInt(j-1) for i=1:m for j=1:n] v = [ a[i,j] for i=1:m for j=1:n] A = PETScMatrix() - A.ownership = m + A.ownership = a @check_error_code PETSC.MatCreateSeqAIJWithArrays(MPI.COMM_SELF,m,n,i,j,v,A.mat) @check_error_code PETSC.MatAssemblyBegin(A.mat[],PETSC.MAT_FINAL_ASSEMBLY) @check_error_code PETSC.MatAssemblyEnd(A.mat[],PETSC.MAT_FINAL_ASSEMBLY) diff --git a/src/PETScNonlinearSolvers.jl b/src/PETScNonlinearSolvers.jl index a3f3bd2..65ff7f2 100644 --- a/src/PETScNonlinearSolvers.jl +++ b/src/PETScNonlinearSolvers.jl @@ -2,40 +2,50 @@ mutable struct PETScNonlinearSolver <: NonlinearSolver comm::MPI.Comm snes::Ref{SNES} + initialized::Bool +end - op +mutable struct PETScNonlinearSolverCache{A,B,C,D} + initialized::Bool + op::NonlinearOperator # Julia LA data structures - x_julia_vec - res_julia_vec - jac_julia_mat_A - jac_julia_mat_P + x_julia_vec::A + res_julia_vec::A + jac_julia_mat_A::B + jac_julia_mat_P::B # Counterpart PETSc data structures - x_petsc_vec - res_petsc_vec - jac_petsc_mat_A - jac_petsc_mat_P + x_petsc_vec::C + res_petsc_vec::C + jac_petsc_mat_A::D + jac_petsc_mat_P::D + + function PETScNonlinearSolverCache(op::NonlinearOperator,x_julia_vec::A,res_julia_vec::A, + jac_julia_mat_A::B,jac_julia_mat_P::B, + x_petsc_vec::C,res_petsc_vec::C, + jac_petsc_mat_A::D,jac_petsc_mat_P::D) where {A,B,C,D} + + cache=new{A,B,C,D}(true,op,x_julia_vec,res_julia_vec,jac_julia_mat_A,jac_julia_mat_P, + x_petsc_vec,res_petsc_vec,jac_petsc_mat_A,jac_petsc_mat_P) + finalizer(Finalize,cache) + end end function snes_residual(csnes::Ptr{Cvoid}, cx::Ptr{Cvoid}, cfx::Ptr{Cvoid}, ctx::Ptr{Cvoid})::PetscInt - nls = unsafe_pointer_to_objref(ctx) - x = Vec(cx) - fx = Vec(cfx) + cache = unsafe_pointer_to_objref(ctx) # 1. Transfer cx to Julia data structures - # Extract pointer to array of values out of cx and put it in a PVector - # xxx + copy!(cache.x_julia_vec, Vec(cx)) # 2. Evaluate residual into Julia data structures - residual!(nls.res_julia_vec, nls.op, nls.x_julia_vec) + residual!(cache.res_julia_vec, cache.op, cache.x_julia_vec) # 3. Transfer Julia residual to PETSc residual (cfx) - # - # xxx + copy!(Vec(cfx), cache.res_julia_vec) return PetscInt(0) end @@ -46,17 +56,17 @@ function snes_jacobian(csnes:: Ptr{Cvoid}, cP :: Ptr{Cvoid}, ctx::Ptr{Cvoid})::PetscInt - nls = unsafe_pointer_to_objref(ctx) + cache = unsafe_pointer_to_objref(ctx) # 1. Transfer cx to Julia data structures # Extract pointer to array of values out of cx and put it in a PVector - # xxx + copy!(cache.x_julia_vec, Vec(cx)) # 2. - jacobian!(nls.jac_julia_mat_A,nls.op,nls.x_julia_vec) + jacobian!(cache.jac_julia_mat_A,cache.op,cache.x_julia_vec) # 3. 
Transfer nls.jac_julia_mat_A/P to PETSc (cA/cP) - # xxx + copy!(Mat(cA), cache.jac_julia_mat_A) return PetscInt(0) end @@ -67,22 +77,33 @@ function PETScNonlinearSolver(setup,comm) snes_ref=Ref{SNES}() @check_error_code PETSC.SNESCreate(comm,snes_ref) setup(snes_ref) - snes=PETScNonlinearSolver(comm, snes_ref, - nothing, - nothing,nothing,nothing,nothing, - nothing,nothing,nothing,nothing) + snes=PETScNonlinearSolver(comm,snes_ref,true) finalizer(Finalize, snes) snes end function Finalize(nls::PETScNonlinearSolver) - if GridapPETSc.Initialized() + if GridapPETSc.Initialized() && nls.initialized @check_error_code PETSC.SNESDestroy(nls.snes) @assert Threads.threadid() == 1 + nls.initialized=false _NREFS[] -= 1 end nothing end + +function Finalize(cache::PETScNonlinearSolverCache) + if GridapPETSc.Initialized() && cache.initialized + GridapPETSc.Finalize(cache.x_petsc_vec) + GridapPETSc.Finalize(cache.res_petsc_vec) + GridapPETSc.Finalize(cache.jac_petsc_mat_A) + if !(cache.jac_petsc_mat_P === cache.jac_petsc_mat_A) + GridapPETSc.Finalize(cache.jac_petsc_mat_P) + end + cache.initialized=false + end +end + snes_from_options(snes) = @check_error_code PETSC.SNESSetFromOptions(snes[]) function PETScNonlinearSolver(comm::MPI.Comm) @@ -92,48 +113,164 @@ end PETScNonlinearSolver(setup::Function) = PETScNonlinearSolver(setup,MPI.COMM_WORLD) PETScNonlinearSolver() = PETScNonlinearSolver(MPI.COMM_WORLD) -function _set_petsc_residual_function!(nls::PETScNonlinearSolver) - ctx = pointer_from_objref(nls) +function _set_petsc_residual_function!(nls::PETScNonlinearSolver, cache) + ctx = pointer_from_objref(cache) fptr = @cfunction(snes_residual, PetscInt, (Ptr{Cvoid}, Ptr{Cvoid}, Ptr{Cvoid}, Ptr{Cvoid})) - PETSC.SNESSetFunction(nls.snes[],nls.res_petsc_vec.vec[],fptr,ctx) + PETSC.SNESSetFunction(nls.snes[],cache.res_petsc_vec.vec[],fptr,ctx) end -function _set_petsc_jacobian_function!(nls::PETScNonlinearSolver) - ctx = pointer_from_objref(nls) +function _set_petsc_jacobian_function!(nls::PETScNonlinearSolver, cache) + ctx = pointer_from_objref(cache) fptr = @cfunction(snes_jacobian, PetscInt, (Ptr{Cvoid}, Ptr{Cvoid}, Ptr{Cvoid}, Ptr{Cvoid},Ptr{Cvoid})) - PETSC.SNESSetJacobian(nls.snes[],nls.jac_petsc_mat_A.mat[],nls.jac_petsc_mat_A.mat[],fptr,ctx) + PETSC.SNESSetJacobian(nls.snes[],cache.jac_petsc_mat_A.mat[],cache.jac_petsc_mat_A.mat[],fptr,ctx) end -function _setup_before_solve!(x,nls::PETScNonlinearSolver,op) - # TO-DO: allocate_residual_and_jacobian + +function _setup_cache(x,nls::PETScNonlinearSolver,op) res_julia_vec, jac_julia_mat_A = residual_and_jacobian(op,x) + res_petsc_vec = PETScVector(res_julia_vec) + x_petsc_vec = PETScVector(x) + jac_petsc_mat_A = PETScMatrix(jac_julia_mat_A) + x_julia_vec = copy(res_julia_vec) + PETScNonlinearSolverCache(op, x_julia_vec,res_julia_vec, + jac_julia_mat_A,jac_julia_mat_A, + x_petsc_vec,res_petsc_vec, + jac_petsc_mat_A, jac_petsc_mat_A) +end - # Does this perform a copy? 
- res_petsc_vec = PETScVector(res_julia_vec) - x_petsc_vec = PETScVector(x) - jac_petsc_mat_A = PETScMatrix(jac_julia_mat_A) - println(jac_julia_mat_A) - PETSC.@check_error_code PETSC.MatView(jac_petsc_mat_A.mat[],PETSC.@PETSC_VIEWER_STDOUT_WORLD) +function Algebra.solve!(x::T, + nls::PETScNonlinearSolver, + op::NonlinearOperator, + cache::PETScNonlinearSolverCache{<:T}) where T <: AbstractVector - nls.op = op + @assert cache.op === op + @check_error_code PETSC.SNESSolve(nls.snes[],C_NULL,cache.x_petsc_vec.vec[]) + copy!(x,cache.x_petsc_vec.vec[]) + cache +end - nls.res_julia_vec = res_julia_vec - nls.x_julia_vec = copy(res_julia_vec) - nls.x_petsc_vec = x_petsc_vec - nls.jac_julia_mat_A = jac_julia_mat_A - nls.jac_julia_mat_P = jac_julia_mat_A - nls.res_petsc_vec = res_petsc_vec - nls.jac_petsc_mat_A = jac_petsc_mat_A - nls.jac_petsc_mat_P = jac_petsc_mat_A +function Algebra.solve!(x::AbstractVector,nls::PETScNonlinearSolver,op::NonlinearOperator) + cache=_setup_cache(x,nls,op) + _set_petsc_residual_function!(nls,cache) + _set_petsc_jacobian_function!(nls,cache) + @check_error_code PETSC.SNESSolve(nls.snes[],C_NULL,cache.x_petsc_vec.vec[]) + copy!(x,cache.x_petsc_vec.vec[]) + cache +end - _set_petsc_residual_function!(nls) - _set_petsc_jacobian_function!(nls) +# ===== TO DO: MOVE Base.copy! overloads below TO APPROPRIATE SOURCE FILES + +function Base.copy!(a::AbstractVector,petsc_vec::Vec) + aux=PETScVector() + aux.vec[] = petsc_vec.ptr + Base.copy!(a,aux) end -function Algebra.solve!(x::AbstractVector,nls::PETScNonlinearSolver,op::NonlinearOperator) - _setup_before_solve!(x,nls,op) - @check_error_code PETSC.SNESSolve(nls.snes[],C_NULL,nls.x_petsc_vec.vec[]) - nothing +function Base.copy!(petsc_vec::Vec,a::AbstractVector) + aux=PETScVector() + aux.vec[] = petsc_vec.ptr + Base.copy!(aux,a) +end + +function Base.copy!(pvec::PVector,petscvec::PETScVector) + map_parts(pvec) do values + lg=get_local_oh_vector(petscvec) + if (isa(lg,PETScVector)) # petsc_vec is a ghosted vector + @assert pvec.rows.ghost + lx=get_local_vector(lg) + @assert length(lx)==length(values) + values .= lx + restore_local_vector!(lx,lg) + GridapPETSc.Finalize(lg) + else # petsc_vec is NOT a ghosted vector + @assert !pvec.rows.ghost + @assert length(lg)==length(values) + values .= lg + restore_local_vector!(petscvec,lg) + end + end + pvec +end + +function Base.copy!(petscvec::PETScVector,pvec::PVector) + map_parts(pvec) do values + lg=get_local_oh_vector(petscvec) + if (isa(lg,PETScVector)) # petscvec is a ghosted vector + @assert pvec.rows.ghost + lx=get_local_vector(lg) + @assert length(lx)==length(values) + lx .= values + restore_local_vector!(lx,lg) + GridapPETSc.Finalize(lg) + else # petscvec is NOT a ghosted vector + @assert !pvec.rows.ghost + @assert length(lg)==length(values) + lg .= values + restore_local_vector!(lg,petscvec) + end + end + pvec +end + +function Base.copy!(vec::AbstractVector,petscvec::PETScVector) + lg=get_local_oh_vector(petscvec) + if isa(lg,PETScVector) # petscvec is a ghosted vector + lx=get_local_vector(lg) + @assert length(lx)==length(vec) + vec .= lx + restore_local_vector!(lx,lg) + GridapPETSc.Finalize(lg) + else # petscvec is NOT a ghosted vector + @assert length(lg)==length(vec) + vec .= lg + restore_local_vector!(lg,petscvec) + end +end + +function Base.copy!(petscvec::PETScVector,vec::AbstractVector) + lg=get_local_oh_vector(petscvec) + if isa(lg,PETScVector) # petscvec is a ghosted vector + lx=get_local_vector(lg) + @assert length(lx)==length(vec) + lx .= vec + 
restore_local_vector!(lx,lg) + GridapPETSc.Finalize(lg) + else # petscvec is NOT a ghosted vector + @assert length(lg)==length(vec) + lg .= vec + restore_local_vector!(lg,petscvec) + end +end + +function Base.copy!(petscmat::Mat,a::AbstractMatrix) + aux=PETScMatrix() + aux.mat[] = petscmat.ptr + Base.copy!(aux,a) +end + + +function Base.copy!(petscmat::PETScMatrix,mat::AbstractMatrix) + n = size(mat)[2] + cols = [PetscInt(j-1) for j=1:n] + row = Vector{PetscInt}(undef,1) + vals = Vector{eltype(mat)}(undef,n) + for i=1:size(mat)[1] + row[1]=PetscInt(i-1) + vals .= view(mat,i,:) + PETSC.MatSetValues(petscmat.mat[], + PetscInt(1), + row, + n, + cols, + vals, + PETSC.INSERT_VALUES) + end + @check_error_code PETSC.MatAssemblyBegin(petscmat.mat[], PETSC.MAT_FINAL_ASSEMBLY) + @check_error_code PETSC.MatAssemblyEnd(petscmat.mat[] , PETSC.MAT_FINAL_ASSEMBLY) +end + +function Base.copy!(petscmat::PETScMatrix,mat::PSparseMatrix) + @notimplemented end diff --git a/test/PETScArraysTests.jl b/test/PETScArraysTests.jl index 583c196..77f5faf 100644 --- a/test/PETScArraysTests.jl +++ b/test/PETScArraysTests.jl @@ -8,15 +8,15 @@ using GridapPETSc: PetscScalar, PetscInt using LinearAlgebra options = "-info" -GridapPETSc.with(args=split(options)) do +GridapPETSc.with(args=split(options)) do n = 10 v = PETScVector(n) - + @test length(v) == n v[4] = 30 @test 30 == v[4] - + s = similar(v,PetscScalar,4) w = similar(v) @@ -35,7 +35,7 @@ GridapPETSc.with(args=split(options)) do a = PETScVector(10) b = convert(PETScVector,a) @test a === b - + m = 4 n = 5 A = PETScMatrix(m,n) @@ -45,7 +45,7 @@ GridapPETSc.with(args=split(options)) do @test A[1,3] == 5 @test A[3,5] == 7 display(A) - + B = similar(A,PetscScalar,3,2) @test typeof(A) == typeof(B) @test size(B) == (3,2) diff --git a/test/PETScNonlinearSolversTests.jl b/test/PETScNonlinearSolversTests.jl index e4da550..4374fda 100644 --- a/test/PETScNonlinearSolversTests.jl +++ b/test/PETScNonlinearSolversTests.jl @@ -4,7 +4,7 @@ using Gridap using GridapPETSc using Test -options = "-snes_type newtonls -snes_monitor" +options = "-snes_type newtonls -snes_linesearch_type basic -snes_linesearch_damping 1.0 -snes_rtol 1.0e-14 -snes_atol 0.0 -snes_monitor -pc_type cholesky -ksp_type preonly -snes_converged_reason" GridapPETSc.Init(args=split(options)) op = Gridap.Algebra.NonlinearOperatorMock() @@ -21,5 +21,8 @@ Gridap.Algebra.test_nonlinear_solver(nls,op,x0,x) x0 = zero_initial_guess(op) cache = solve!(x0,nls,op) +GridapPETSc.Finalize(cache) GridapPETSc.Finalize(nls) +GridapPETSc.Finalize() + end # module diff --git a/test/PartitionedArraysTests.jl b/test/PartitionedArraysTests.jl index 8c6f143..d50dfd6 100644 --- a/test/PartitionedArraysTests.jl +++ b/test/PartitionedArraysTests.jl @@ -60,6 +60,23 @@ function partitioned_tests(parts) end v = PVector(values,ids) x = PETScVector(v) + if (length(parts)>1) + map_parts(parts) do part + lg=get_local_oh_vector(x) + @test isa(lg,PETScVector) + lx=get_local_vector(lg) + if part==1 + @test length(lx)==5 + elseif part==2 + @test length(lx)==5 + elseif part==3 + @test length(lx)==3 + end + restore_local_vector!(lx,lg) + GridapPETSc.Finalize(lg) + end + end + PETSC.@check_error_code PETSC.VecView(x.vec[],C_NULL) u = PVector(x,ids) exchange!(u) diff --git a/test/runtests.jl b/test/runtests.jl index 547decb..174ef28 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -12,6 +12,8 @@ using Test @time @testset "PETScLinearSolvers" begin include("PETScLinearSolversTests.jl") end +@time @testset "PETScNonLinearSolvers" begin 
include("PETScNonLinearSolversTests.jl") end + @time @testset "PETScAssembly" begin include("PETScAssemblyTests.jl") end @time @testset "PoissonDriver" begin include("PoissonDriver.jl") end From 8eeff967e92a0540c1d4a7a53a225a53ef7d9bdc Mon Sep 17 00:00:00 2001 From: amartin Date: Tue, 26 Oct 2021 23:39:48 +1100 Subject: [PATCH 04/14] Fixing failing test --- test/PartitionedArraysTests.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/PartitionedArraysTests.jl b/test/PartitionedArraysTests.jl index d50dfd6..544d6cc 100644 --- a/test/PartitionedArraysTests.jl +++ b/test/PartitionedArraysTests.jl @@ -60,7 +60,7 @@ function partitioned_tests(parts) end v = PVector(values,ids) x = PETScVector(v) - if (length(parts)>1) + if (get_backend(v.values)==mpi) map_parts(parts) do part lg=get_local_oh_vector(x) @test isa(lg,PETScVector) From 854f48c05a241e45f70983ce35ca614f7fbc8226 Mon Sep 17 00:00:00 2001 From: amartin Date: Tue, 26 Oct 2021 23:48:00 +1100 Subject: [PATCH 05/14] Fixing typo --- test/runtests.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/runtests.jl b/test/runtests.jl index 174ef28..7689270 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -12,7 +12,7 @@ using Test @time @testset "PETScLinearSolvers" begin include("PETScLinearSolversTests.jl") end -@time @testset "PETScNonLinearSolvers" begin include("PETScNonLinearSolversTests.jl") end +@time @testset "PETScNonLinearSolvers" begin include("PETScNonlinearSolversTests.jl") end @time @testset "PETScAssembly" begin include("PETScAssemblyTests.jl") end From d945c3b2ecad0a940addb354ac7cc84ad942f238 Mon Sep 17 00:00:00 2001 From: amartin Date: Wed, 27 Oct 2021 22:32:55 +1100 Subject: [PATCH 06/14] * Developed pending Base.copy! overloads * Added tests for Base.copy! 
--- src/PETScArrays.jl | 73 +++++++++++++++++++++ src/PETScNonlinearSolvers.jl | 113 --------------------------------- src/PartitionedArrays.jl | 88 ++++++++++++++++++++++++- test/PartitionedArraysTests.jl | 99 +++++++++++++++++++---------- 4 files changed, 223 insertions(+), 150 deletions(-) diff --git a/src/PETScArrays.jl b/src/PETScArrays.jl index 8fbc172..0596df5 100644 --- a/src/PETScArrays.jl +++ b/src/PETScArrays.jl @@ -106,6 +106,50 @@ function Base.convert(::Type{PETScVector},a::AbstractVector) PETScVector(array) end +function Base.copy!(a::AbstractVector,petsc_vec::Vec) + aux=PETScVector() + aux.vec[] = petsc_vec.ptr + Base.copy!(a,aux) +end + +function Base.copy!(petsc_vec::Vec,a::AbstractVector) + aux=PETScVector() + aux.vec[] = petsc_vec.ptr + Base.copy!(aux,a) +end + +function Base.copy!(vec::AbstractVector,petscvec::PETScVector) + lg=get_local_oh_vector(petscvec) + if isa(lg,PETScVector) # petscvec is a ghosted vector + lx=get_local_vector(lg) + @assert length(lx)==length(vec) + vec .= lx + restore_local_vector!(lx,lg) + GridapPETSc.Finalize(lg) + else # petscvec is NOT a ghosted vector + @assert length(lg)==length(vec) + vec .= lg + restore_local_vector!(lg,petscvec) + end +end + +function Base.copy!(petscvec::PETScVector,vec::AbstractVector) + lg=get_local_oh_vector(petscvec) + if isa(lg,PETScVector) # petscvec is a ghosted vector + lx=get_local_vector(lg) + @assert length(lx)==length(vec) + lx .= vec + restore_local_vector!(lx,lg) + GridapPETSc.Finalize(lg) + else # petscvec is NOT a ghosted vector + @assert length(lg)==length(vec) + lg .= vec + restore_local_vector!(lg,petscvec) + end +end + + + function get_local_oh_vector(a::PETScVector) v=PETScVector() @check_error_code PETSC.VecGhostGetLocalForm(a.vec[],v.vec) @@ -271,6 +315,35 @@ function Base.copy(a::PETScMatrix) Init(v) end +function Base.copy!(petscmat::Mat,a::AbstractMatrix) + aux=PETScMatrix() + aux.mat[] = petscmat.ptr + Base.copy!(aux,a) +end + + +function Base.copy!(petscmat::PETScMatrix,mat::AbstractMatrix) + n = size(mat)[2] + cols = [PetscInt(j-1) for j=1:n] + row = Vector{PetscInt}(undef,1) + vals = Vector{eltype(mat)}(undef,n) + for i=1:size(mat)[1] + row[1]=PetscInt(i-1) + vals .= view(mat,i,:) + PETSC.MatSetValues(petscmat.mat[], + PetscInt(1), + row, + n, + cols, + vals, + PETSC.INSERT_VALUES) + end + @check_error_code PETSC.MatAssemblyBegin(petscmat.mat[], PETSC.MAT_FINAL_ASSEMBLY) + @check_error_code PETSC.MatAssemblyEnd(petscmat.mat[] , PETSC.MAT_FINAL_ASSEMBLY) +end + + + function Base.convert(::Type{PETScMatrix},a::PETScMatrix) a end diff --git a/src/PETScNonlinearSolvers.jl b/src/PETScNonlinearSolvers.jl index 65ff7f2..48e615b 100644 --- a/src/PETScNonlinearSolvers.jl +++ b/src/PETScNonlinearSolvers.jl @@ -161,116 +161,3 @@ function Algebra.solve!(x::AbstractVector,nls::PETScNonlinearSolver,op::Nonlinea end # ===== TO DO: MOVE Base.copy! 
overloads below TO APPROPRIATE SOURCE FILES - -function Base.copy!(a::AbstractVector,petsc_vec::Vec) - aux=PETScVector() - aux.vec[] = petsc_vec.ptr - Base.copy!(a,aux) -end - -function Base.copy!(petsc_vec::Vec,a::AbstractVector) - aux=PETScVector() - aux.vec[] = petsc_vec.ptr - Base.copy!(aux,a) -end - -function Base.copy!(pvec::PVector,petscvec::PETScVector) - map_parts(pvec) do values - lg=get_local_oh_vector(petscvec) - if (isa(lg,PETScVector)) # petsc_vec is a ghosted vector - @assert pvec.rows.ghost - lx=get_local_vector(lg) - @assert length(lx)==length(values) - values .= lx - restore_local_vector!(lx,lg) - GridapPETSc.Finalize(lg) - else # petsc_vec is NOT a ghosted vector - @assert !pvec.rows.ghost - @assert length(lg)==length(values) - values .= lg - restore_local_vector!(petscvec,lg) - end - end - pvec -end - -function Base.copy!(petscvec::PETScVector,pvec::PVector) - map_parts(pvec) do values - lg=get_local_oh_vector(petscvec) - if (isa(lg,PETScVector)) # petscvec is a ghosted vector - @assert pvec.rows.ghost - lx=get_local_vector(lg) - @assert length(lx)==length(values) - lx .= values - restore_local_vector!(lx,lg) - GridapPETSc.Finalize(lg) - else # petscvec is NOT a ghosted vector - @assert !pvec.rows.ghost - @assert length(lg)==length(values) - lg .= values - restore_local_vector!(lg,petscvec) - end - end - pvec -end - -function Base.copy!(vec::AbstractVector,petscvec::PETScVector) - lg=get_local_oh_vector(petscvec) - if isa(lg,PETScVector) # petscvec is a ghosted vector - lx=get_local_vector(lg) - @assert length(lx)==length(vec) - vec .= lx - restore_local_vector!(lx,lg) - GridapPETSc.Finalize(lg) - else # petscvec is NOT a ghosted vector - @assert length(lg)==length(vec) - vec .= lg - restore_local_vector!(lg,petscvec) - end -end - -function Base.copy!(petscvec::PETScVector,vec::AbstractVector) - lg=get_local_oh_vector(petscvec) - if isa(lg,PETScVector) # petscvec is a ghosted vector - lx=get_local_vector(lg) - @assert length(lx)==length(vec) - lx .= vec - restore_local_vector!(lx,lg) - GridapPETSc.Finalize(lg) - else # petscvec is NOT a ghosted vector - @assert length(lg)==length(vec) - lg .= vec - restore_local_vector!(lg,petscvec) - end -end - -function Base.copy!(petscmat::Mat,a::AbstractMatrix) - aux=PETScMatrix() - aux.mat[] = petscmat.ptr - Base.copy!(aux,a) -end - - -function Base.copy!(petscmat::PETScMatrix,mat::AbstractMatrix) - n = size(mat)[2] - cols = [PetscInt(j-1) for j=1:n] - row = Vector{PetscInt}(undef,1) - vals = Vector{eltype(mat)}(undef,n) - for i=1:size(mat)[1] - row[1]=PetscInt(i-1) - vals .= view(mat,i,:) - PETSC.MatSetValues(petscmat.mat[], - PetscInt(1), - row, - n, - cols, - vals, - PETSC.INSERT_VALUES) - end - @check_error_code PETSC.MatAssemblyBegin(petscmat.mat[], PETSC.MAT_FINAL_ASSEMBLY) - @check_error_code PETSC.MatAssemblyEnd(petscmat.mat[] , PETSC.MAT_FINAL_ASSEMBLY) -end - -function Base.copy!(petscmat::PETScMatrix,mat::PSparseMatrix) - @notimplemented -end diff --git a/src/PartitionedArrays.jl b/src/PartitionedArrays.jl index a6e6501..b504e8a 100644 --- a/src/PartitionedArrays.jl +++ b/src/PartitionedArrays.jl @@ -9,7 +9,7 @@ end function PETScVector(v::PVector,::SequentialBackend) gid_to_value = zeros(eltype(v),length(v)) map_parts(v.values,v.rows.partition) do values,rows - @check isa(rows,IndexRange) "Not supported partition for PETSc vectors" # to be consistent with MPI + @check isa(rows,IndexRange) "Unsupported partition for PETSc vectors" # to be consistent with MPI oid_to_gid = view(rows.lid_to_gid,rows.oid_to_lid) oid_to_value 
= view(values,rows.oid_to_lid) gid_to_value[oid_to_gid] = oid_to_value @@ -21,8 +21,9 @@ function PETScVector(v::PVector,::MPIBackend) w = PETScVector() N = num_gids(v.rows) comm = v.values.comm # Not sure about this + map_parts(v.values,v.rows.partition) do lid_to_value, rows - @check isa(rows,IndexRange) "Not supported partition for PETSc vectors" + @check isa(rows,IndexRange) "Unsupported partition for PETSc vectors" array = convert(Vector{PetscScalar},lid_to_value) n = num_oids(rows) nghost = num_hids(rows) @@ -79,6 +80,55 @@ function PartitionedArrays.PVector(v::PETScVector,ids::PRange,::MPIBackend) PVector(values,ids) end + +function Base.copy!(pvec::PVector,petscvec::PETScVector) + if get_backend(pvec.values) == mpi + map_parts(pvec.values) do values + lg=get_local_oh_vector(petscvec) + if (isa(lg,PETScVector)) # petsc_vec is a ghosted vector + @assert pvec.rows.ghost + lx=get_local_vector(lg) + @assert length(lx)==length(values) + values .= lx + restore_local_vector!(lx,lg) + GridapPETSc.Finalize(lg) + else # petsc_vec is NOT a ghosted vector + @assert !pvec.rows.ghost + @assert length(lg)==length(values) + values .= lg + restore_local_vector!(petscvec,lg) + end + end + elseif get_backend(pvec.values) == sequential + @notimplemented + end + pvec +end + +function Base.copy!(petscvec::PETScVector,pvec::PVector) + if get_backend(pvec.values) == mpi + map_parts(pvec.values) do values + lg=get_local_oh_vector(petscvec) + if (isa(lg,PETScVector)) # petscvec is a ghosted vector + @assert pvec.rows.ghost + lx=get_local_vector(lg) + @assert length(lx)==length(values) + lx .= values + restore_local_vector!(lx,lg) + GridapPETSc.Finalize(lg) + else # petscvec is NOT a ghosted vector + @assert !pvec.rows.ghost + @assert length(lg)==length(values) + lg .= values + restore_local_vector!(lg,petscvec) + end + end + elseif get_backend(pvec.values) == sequential + @notimplemented + end + petscvec +end + function PETScMatrix(a::PSparseMatrix) backend = get_backend(a.values) PETScMatrix(a,backend) @@ -120,4 +170,36 @@ function PETScMatrix(a::PSparseMatrix,::MPIBackend) b end - +function Base.copy!(petscmat::PETScMatrix,mat::PSparseMatrix) + map_parts(mat.values,mat.rows.partition,mat.cols.partition) do lmat, rdofs, cdofs + Tm = SparseMatrixCSR{0,PetscScalar,PetscInt} + csr = convert(Tm,lmat) + ia = csr.rowptr + ja = csr.colval + a = csr.nzval + m = csr.m + n = csr.n + maxnnz = maximum( ia[i+1]-ia[i] for i=1:m ) + row = Vector{PetscInt}(undef,1) + cols = Vector{PetscInt}(undef,maxnnz) + for i=1:m + row[1]=PetscInt(rdofs.lid_to_gid[i]-1) + current=1 + for j=ia[i]+1:ia[i+1] + col=ja[j]+1 + cols[current]=PetscInt(cdofs.lid_to_gid[ja[j]+1]-1) + current=current+1 + end + vals = view(a,ia[i]+1:ia[i+1]) + PETSC.MatSetValues(petscmat.mat[], + PetscInt(1), + row, + ia[i+1]-ia[i], + cols, + vals, + PETSC.INSERT_VALUES) + end + end + @check_error_code PETSC.MatAssemblyBegin(petscmat.mat[], PETSC.MAT_FINAL_ASSEMBLY) + @check_error_code PETSC.MatAssemblyEnd(petscmat.mat[] , PETSC.MAT_FINAL_ASSEMBLY) +end diff --git a/test/PartitionedArraysTests.jl b/test/PartitionedArraysTests.jl index 544d6cc..b9834b4 100644 --- a/test/PartitionedArraysTests.jl +++ b/test/PartitionedArraysTests.jl @@ -4,6 +4,7 @@ using GridapPETSc.PETSC using PartitionedArrays using SparseMatricesCSR using Test +using LinearAlgebra function partitioned_tests(parts) @@ -53,59 +54,89 @@ function partitioned_tests(parts) GridapPETSc.Init(args=split("-ksp_type gmres -ksp_monitor -pc_type jacobi")) + function 
test_get_local_vector(v::PVector,x::PETScVector) + if (get_backend(v.values)==mpi) + map_parts(parts) do part + lg=get_local_oh_vector(x) + @test isa(lg,PETScVector) + lx=get_local_vector(lg) + if part==1 + @test length(lx)==5 + elseif part==2 + @test length(lx)==5 + elseif part==3 + @test length(lx)==3 + end + restore_local_vector!(lx,lg) + GridapPETSc.Finalize(lg) + end + end + end + ngids = 7 ids = PRange(parts,ngids,noids,firstgid,hid_to_gid,hid_to_part) values = map_parts(ids.partition) do ids 10.0*ids.lid_to_gid end + + function test_vectors(v::PVector,x::PETScVector) + PETSC.@check_error_code PETSC.VecView(x.vec[],C_NULL) + u = PVector(x,ids) + exchange!(u) + map_parts(u.values,v.values) do u,v + @test u ≈ v + end + end + v = PVector(values,ids) x = PETScVector(v) + test_vectors(v,x) + if (get_backend(v.values)==mpi) - map_parts(parts) do part - lg=get_local_oh_vector(x) - @test isa(lg,PETScVector) - lx=get_local_vector(lg) - if part==1 - @test length(lx)==5 - elseif part==2 - @test length(lx)==5 - elseif part==3 - @test length(lx)==3 - end - restore_local_vector!(lx,lg) - GridapPETSc.Finalize(lg) - end - end + # Copy v into v1 to circumvent (potentia) aliasing of v and x + v1=copy(v) + fill!(v1,zero(eltype(v))) + copy!(v1,x) + test_vectors(v1,x) - PETSC.@check_error_code PETSC.VecView(x.vec[],C_NULL) - u = PVector(x,ids) - exchange!(u) - map_parts(u.values,v.values) do u,v - @test u == v + # Copy x into x1 to circumvent (potential) aliasing of v and x + x1=copy(x) + fill!(x1,PetscScalar(0.0)) + copy!(x1,v) + test_vectors(v,x1) + GridapPETSc.Finalize(x1) end + A = PSparseMatrix(I,J,V,ids,ids,ids=:global) display(A.values) B = PETScMatrix(A) PETSC.@check_error_code PETSC.MatView(B.mat[],PETSC.@PETSC_VIEWER_STDOUT_WORLD) - #TODO hide conversions and solver setup - solver = PETScLinearSolver() - ss = symbolic_setup(solver,B) - ns = numerical_setup(ss,B) - y = PETScVector(A*v) - x̂ = PETScVector(0.0,ids) - solve!(x̂,ns,y) - z = PVector(x̂,ids) - exchange!(z) - map_parts(z.values,v.values) do z,v - @test maximum(abs.(z-v)) < 1e-5 + function solve_system_and_check_solution(A::PSparseMatrix,B::PETScMatrix,v) + #TODO hide conversions and solver setup + solver = PETScLinearSolver() + ss = symbolic_setup(solver,B) + ns = numerical_setup(ss,B) + y = PETScVector(A*v) + x̂ = PETScVector(0.0,ids) + solve!(x̂,ns,y) + z = PVector(x̂,ids) + exchange!(z) + map_parts(z.values,v.values) do z,v + @test maximum(abs.(z-v)) < 1e-5 + end + GridapPETSc.Finalize(x̂) + GridapPETSc.Finalize(y) end + solve_system_and_check_solution(A,B,v) + + # Test that copy! 
works ok + LinearAlgebra.fillstored!(B,PetscScalar(0.0)) + copy!(B,A) + solve_system_and_check_solution(A,B,v) - GridapPETSc.Finalize(x̂) GridapPETSc.Finalize(B) - GridapPETSc.Finalize(y) - GridapPETSc.Finalize(ns) GridapPETSc.Finalize(x) GridapPETSc.Finalize() end From 66c7f4fe31520a039a19af4c19f3da45e92ecd22 Mon Sep 17 00:00:00 2001 From: amartin Date: Wed, 27 Oct 2021 22:39:58 +1100 Subject: [PATCH 07/14] Temporarily add GridapDistributed#master and Manifest --- .gitignore | 2 +- Manifest.toml | 522 ++++++++++++++++++++++++++++++++++++++++++++++++++ Project.toml | 1 + 3 files changed, 524 insertions(+), 1 deletion(-) create mode 100644 Manifest.toml diff --git a/.gitignore b/.gitignore index bd4ad51..9225b4a 100644 --- a/.gitignore +++ b/.gitignore @@ -2,7 +2,7 @@ *.jl.cov *.jl.mem .DS_Store -Manifest.toml +#Manifest.toml /dev/ /docs/build/ /docs/site/ diff --git a/Manifest.toml b/Manifest.toml new file mode 100644 index 0000000..74f1fa0 --- /dev/null +++ b/Manifest.toml @@ -0,0 +1,522 @@ +# This file is machine-generated - editing it directly is not advised + +[[AbstractTrees]] +git-tree-sha1 = "03e0550477d86222521d254b741d470ba17ea0b5" +uuid = "1520ce14-60c1-5f80-bbc7-55ef81b5835c" +version = "0.3.4" + +[[ArgTools]] +uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f" + +[[ArrayInterface]] +deps = ["Compat", "IfElse", "LinearAlgebra", "Requires", "SparseArrays", "Static"] +git-tree-sha1 = "a8101545d6b15ff1ebc927e877e28b0ab4bc4f16" +uuid = "4fba245c-0d91-5ea0-9b3e-6abc04ee57a9" +version = "3.1.36" + +[[ArrayLayouts]] +deps = ["FillArrays", "LinearAlgebra", "SparseArrays"] +git-tree-sha1 = "7a92ea1dd16472d18ca1ffcbb7b3cc67d7e78a3f" +uuid = "4c555306-a7a7-4459-81d9-ec55ddd5c99a" +version = "0.7.7" + +[[Artifacts]] +uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33" + +[[BSON]] +git-tree-sha1 = "ebcd6e22d69f21249b7b8668351ebf42d6dc87a1" +uuid = "fbb218c0-5317-5bc6-957e-2ee96dd4b1f0" +version = "0.3.4" + +[[Base64]] +uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" + +[[BlockArrays]] +deps = ["ArrayLayouts", "FillArrays", "LinearAlgebra"] +git-tree-sha1 = "5524e27323cf4c4505699c3fb008c3f772269945" +uuid = "8e7c35d0-a365-5155-bbbb-fb81a777f24e" +version = "0.16.9" + +[[ChainRulesCore]] +deps = ["Compat", "LinearAlgebra", "SparseArrays"] +git-tree-sha1 = "3533f5a691e60601fe60c90d8bc47a27aa2907ec" +uuid = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" +version = "1.11.0" + +[[CodecZlib]] +deps = ["TranscodingStreams", "Zlib_jll"] +git-tree-sha1 = "ded953804d019afa9a3f98981d99b33e3db7b6da" +uuid = "944b1d66-785c-5afd-91f1-9de20f533193" +version = "0.7.0" + +[[Combinatorics]] +git-tree-sha1 = "08c8b6831dc00bfea825826be0bc8336fc369860" +uuid = "861a8166-3701-5b0c-9a16-15d98fcdc6aa" +version = "1.0.2" + +[[CommonSubexpressions]] +deps = ["MacroTools", "Test"] +git-tree-sha1 = "7b8a93dba8af7e3b42fecabf646260105ac373f7" +uuid = "bbf7d656-a473-5ed7-a52c-81e309532950" +version = "0.3.0" + +[[Compat]] +deps = ["Base64", "Dates", "DelimitedFiles", "Distributed", "InteractiveUtils", "LibGit2", "Libdl", "LinearAlgebra", "Markdown", "Mmap", "Pkg", "Printf", "REPL", "Random", "SHA", "Serialization", "SharedArrays", "Sockets", "SparseArrays", "Statistics", "Test", "UUIDs", "Unicode"] +git-tree-sha1 = "31d0151f5716b655421d9d75b7fa74cc4e744df2" +uuid = "34da2185-b29b-5c13-b0c7-acf172513d20" +version = "3.39.0" + +[[CompilerSupportLibraries_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "e66e0078-7015-5450-92f7-15fbd957f2ae" + +[[DataStructures]] +deps = ["Compat", "InteractiveUtils", "OrderedCollections"] +git-tree-sha1 = 
"7d9d316f04214f7efdbb6398d545446e246eff02" +uuid = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8" +version = "0.18.10" + +[[Dates]] +deps = ["Printf"] +uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" + +[[DelimitedFiles]] +deps = ["Mmap"] +uuid = "8bb1440f-4735-579b-a4ab-409b98df4dab" + +[[DiffResults]] +deps = ["StaticArrays"] +git-tree-sha1 = "c18e98cba888c6c25d1c3b048e4b3380ca956805" +uuid = "163ba53b-c6d8-5494-b064-1a9d43ac40c5" +version = "1.0.3" + +[[DiffRules]] +deps = ["NaNMath", "Random", "SpecialFunctions"] +git-tree-sha1 = "7220bc21c33e990c14f4a9a319b1d242ebc5b269" +uuid = "b552c78f-8df3-52c6-915a-8e097449b14b" +version = "1.3.1" + +[[Distances]] +deps = ["LinearAlgebra", "Statistics", "StatsAPI"] +git-tree-sha1 = "09d9eaef9ef719d2cd5d928a191dc95be2ec8059" +uuid = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" +version = "0.10.5" + +[[Distributed]] +deps = ["Random", "Serialization", "Sockets"] +uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" + +[[DocStringExtensions]] +deps = ["LibGit2"] +git-tree-sha1 = "b19534d1895d702889b219c382a6e18010797f0b" +uuid = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae" +version = "0.8.6" + +[[Downloads]] +deps = ["ArgTools", "LibCURL", "NetworkOptions"] +uuid = "f43a241f-c20a-4ad4-852c-f6b1247861c6" + +[[FastGaussQuadrature]] +deps = ["LinearAlgebra", "SpecialFunctions", "StaticArrays"] +git-tree-sha1 = "5829b25887e53fb6730a9df2ff89ed24baa6abf6" +uuid = "442a2c76-b920-505d-bb47-c5924d526838" +version = "0.4.7" + +[[FileIO]] +deps = ["Pkg", "Requires", "UUIDs"] +git-tree-sha1 = "3c041d2ac0a52a12a27af2782b34900d9c3ee68c" +uuid = "5789e2e9-d7fb-5bc7-8068-2c6fae9b9549" +version = "1.11.1" + +[[FillArrays]] +deps = ["LinearAlgebra", "Random", "SparseArrays", "Statistics"] +git-tree-sha1 = "8756f9935b7ccc9064c6eef0bff0ad643df733a3" +uuid = "1a297f60-69ca-5386-bcde-b61e274b549b" +version = "0.12.7" + +[[FiniteDiff]] +deps = ["ArrayInterface", "LinearAlgebra", "Requires", "SparseArrays", "StaticArrays"] +git-tree-sha1 = "8b3c09b56acaf3c0e581c66638b85c8650ee9dca" +uuid = "6a86dc24-6348-571c-b903-95158fe2bd41" +version = "2.8.1" + +[[ForwardDiff]] +deps = ["CommonSubexpressions", "DiffResults", "DiffRules", "LinearAlgebra", "NaNMath", "Preferences", "Printf", "Random", "SpecialFunctions", "StaticArrays"] +git-tree-sha1 = "63777916efbcb0ab6173d09a658fb7f2783de485" +uuid = "f6369f11-7733-5829-9624-2563aa707210" +version = "0.10.21" + +[[Gridap]] +deps = ["AbstractTrees", "BSON", "BlockArrays", "Combinatorics", "DocStringExtensions", "FastGaussQuadrature", "FileIO", "FillArrays", "ForwardDiff", "JLD2", "JSON", "LineSearches", "LinearAlgebra", "NLsolve", "NearestNeighbors", "QuadGK", "Random", "SparseArrays", "SparseMatricesCSR", "StaticArrays", "Test", "WriteVTK"] +git-tree-sha1 = "db94b3f4e47315b2b9c4ccc6d54aff32829051dd" +uuid = "56d4f2e9-7ea1-5844-9cf6-b9c51ca7ce8e" +version = "0.17.3" + +[[GridapDistributed]] +deps = ["FillArrays", "Gridap", "LinearAlgebra", "MPI", "PartitionedArrays", "SparseArrays", "WriteVTK"] +git-tree-sha1 = "ecca67da64453f9f838615ae84f9e840acf44296" +repo-rev = "master" +repo-url = "https://github.com/gridap/GridapDistributed.jl" +uuid = "f9701e48-63b3-45aa-9a63-9bc6c271f355" +version = "0.2.0" + +[[IfElse]] +git-tree-sha1 = "28e837ff3e7a6c3cdb252ce49fb412c8eb3caeef" +uuid = "615f187c-cbe4-4ef1-ba3b-2fcf58d6d173" +version = "0.1.0" + +[[InteractiveUtils]] +deps = ["Markdown"] +uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" + +[[InverseFunctions]] +deps = ["Test"] +git-tree-sha1 = "f0c6489b12d28fb4c2103073ec7452f3423bd308" +uuid = 
"3587e190-3f89-42d0-90ee-14403ec27112" +version = "0.1.1" + +[[IrrationalConstants]] +git-tree-sha1 = "7fd44fd4ff43fc60815f8e764c0f352b83c49151" +uuid = "92d709cd-6900-40b7-9082-c6be49f344b6" +version = "0.1.1" + +[[IterativeSolvers]] +deps = ["LinearAlgebra", "Printf", "Random", "RecipesBase", "SparseArrays"] +git-tree-sha1 = "1a8c6237e78b714e901e406c096fc8a65528af7d" +uuid = "42fd0dbc-a981-5370-80f2-aaf504508153" +version = "0.9.1" + +[[JLD2]] +deps = ["DataStructures", "FileIO", "MacroTools", "Mmap", "Pkg", "Printf", "Reexport", "TranscodingStreams", "UUIDs"] +git-tree-sha1 = "46b7834ec8165c541b0b5d1c8ba63ec940723ffb" +uuid = "033835bb-8acc-5ee8-8aae-3f567f8a3819" +version = "0.4.15" + +[[JLLWrappers]] +deps = ["Preferences"] +git-tree-sha1 = "642a199af8b68253517b80bd3bfd17eb4e84df6e" +uuid = "692b3bcd-3c85-4b1f-b108-f13ce0eb3210" +version = "1.3.0" + +[[JSON]] +deps = ["Dates", "Mmap", "Parsers", "Unicode"] +git-tree-sha1 = "8076680b162ada2a031f707ac7b4953e30667a37" +uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" +version = "0.21.2" + +[[LibCURL]] +deps = ["LibCURL_jll", "MozillaCACerts_jll"] +uuid = "b27032c2-a3e7-50c8-80cd-2d36dbcbfd21" + +[[LibCURL_jll]] +deps = ["Artifacts", "LibSSH2_jll", "Libdl", "MbedTLS_jll", "Zlib_jll", "nghttp2_jll"] +uuid = "deac9b47-8bc7-5906-a0fe-35ac56dc84c0" + +[[LibGit2]] +deps = ["Base64", "NetworkOptions", "Printf", "SHA"] +uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" + +[[LibSSH2_jll]] +deps = ["Artifacts", "Libdl", "MbedTLS_jll"] +uuid = "29816b5a-b9ab-546f-933c-edad1886dfa8" + +[[Libdl]] +uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" + +[[Libiconv_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "42b62845d70a619f063a7da093d995ec8e15e778" +uuid = "94ce4f54-9a6c-5748-9c1c-f9c7231a4531" +version = "1.16.1+1" + +[[LightXML]] +deps = ["Libdl", "XML2_jll"] +git-tree-sha1 = "e129d9391168c677cd4800f5c0abb1ed8cb3794f" +uuid = "9c8b4983-aa76-5018-a973-4c85ecc9e179" +version = "0.9.0" + +[[LineSearches]] +deps = ["LinearAlgebra", "NLSolversBase", "NaNMath", "Parameters", "Printf"] +git-tree-sha1 = "f27132e551e959b3667d8c93eae90973225032dd" +uuid = "d3d80556-e9d4-5f37-9878-2ab0fcc64255" +version = "7.1.1" + +[[LinearAlgebra]] +deps = ["Libdl"] +uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" + +[[LogExpFunctions]] +deps = ["ChainRulesCore", "DocStringExtensions", "InverseFunctions", "IrrationalConstants", "LinearAlgebra"] +git-tree-sha1 = "6193c3815f13ba1b78a51ce391db8be016ae9214" +uuid = "2ab3a3ac-af41-5b50-aa03-7779005ae688" +version = "0.3.4" + +[[Logging]] +uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" + +[[MPI]] +deps = ["Distributed", "DocStringExtensions", "Libdl", "MPICH_jll", "MicrosoftMPI_jll", "OpenMPI_jll", "Pkg", "Random", "Requires", "Serialization", "Sockets"] +git-tree-sha1 = "340d8dc89e1c85a846d3f38ee294bfdd1684055a" +uuid = "da04e1cc-30fd-572f-bb4f-1f8673147195" +version = "0.19.1" + +[[MPICH_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "09864c823da1a606dbc151534c1a134fd5506170" +uuid = "7cb0a576-ebde-5e09-9194-50597f1243b4" +version = "3.4.2+1" + +[[MacroTools]] +deps = ["Markdown", "Random"] +git-tree-sha1 = "5a5bc6bf062f0f95e62d0fe0a2d99699fed82dd9" +uuid = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09" +version = "0.5.8" + +[[Markdown]] +deps = ["Base64"] +uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" + +[[MbedTLS_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1" + +[[MicrosoftMPI_jll]] +deps = ["Artifacts", "JLLWrappers", 
"Libdl", "Pkg"] +git-tree-sha1 = "bb2fe65544e6efd883bb2060088df7dfb7273b41" +uuid = "9237b28f-5490-5468-be7b-bb81f5f5e6cf" +version = "10.1.3+1" + +[[Mmap]] +uuid = "a63ad114-7e13-5084-954f-fe012c677804" + +[[MozillaCACerts_jll]] +uuid = "14a3606d-f60d-562e-9121-12d972cd8159" + +[[NLSolversBase]] +deps = ["DiffResults", "Distributed", "FiniteDiff", "ForwardDiff"] +git-tree-sha1 = "144bab5b1443545bc4e791536c9f1eacb4eed06a" +uuid = "d41bc354-129a-5804-8e4c-c37616107c6c" +version = "7.8.1" + +[[NLsolve]] +deps = ["Distances", "LineSearches", "LinearAlgebra", "NLSolversBase", "Printf", "Reexport"] +git-tree-sha1 = "019f12e9a1a7880459d0173c182e6a99365d7ac1" +uuid = "2774e3e8-f4cf-5e23-947b-6d7e65073b56" +version = "4.5.1" + +[[NaNMath]] +git-tree-sha1 = "bfe47e760d60b82b66b61d2d44128b62e3a369fb" +uuid = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" +version = "0.3.5" + +[[NearestNeighbors]] +deps = ["Distances", "StaticArrays"] +git-tree-sha1 = "16baacfdc8758bc374882566c9187e785e85c2f0" +uuid = "b8a86587-4115-5ab1-83bc-aa920d37bbce" +version = "0.4.9" + +[[NetworkOptions]] +uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908" + +[[OpenBLAS32_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "ba4a8f683303c9082e84afba96f25af3c7fb2436" +uuid = "656ef2d0-ae68-5445-9ca0-591084a874a2" +version = "0.3.12+1" + +[[OpenLibm_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "05823500-19ac-5b8b-9628-191a04bc5112" + +[[OpenMPI_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "872077914c8a8cab9ea1430f338ae6b59258577d" +uuid = "fe0851c0-eecd-5654-98d4-656369965a5c" +version = "4.1.1+3" + +[[OpenSpecFun_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "13652491f6856acfd2db29360e1bbcd4565d04f1" +uuid = "efe28fd5-8261-553b-a9e1-b2916fc3738e" +version = "0.5.5+0" + +[[OrderedCollections]] +git-tree-sha1 = "85f8e6578bf1f9ee0d11e7bb1b1456435479d47c" +uuid = "bac558e1-5e72-5ebc-8fee-abe8a469f55d" +version = "1.4.1" + +[[PETSc_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "MPICH_jll", "MicrosoftMPI_jll", "OpenBLAS32_jll", "Pkg"] +git-tree-sha1 = "8384198eba24438cee406ec7ca8854acdcbbd2c8" +uuid = "8fa3689e-f0b9-5420-9873-adf6ccf46f2d" +version = "3.15.2+0" + +[[Parameters]] +deps = ["OrderedCollections", "UnPack"] +git-tree-sha1 = "34c0e9ad262e5f7fc75b10a9952ca7692cfc5fbe" +uuid = "d96e819e-fc66-5662-9728-84c9c7592b0a" +version = "0.12.3" + +[[Parsers]] +deps = ["Dates"] +git-tree-sha1 = "f19e978f81eca5fd7620650d7dbea58f825802ee" +uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0" +version = "2.1.0" + +[[PartitionedArrays]] +deps = ["Distances", "IterativeSolvers", "LinearAlgebra", "MPI", "Printf", "SparseArrays", "SparseMatricesCSR"] +git-tree-sha1 = "fc308afeda353c9e8f8413b7cfe944615defa853" +uuid = "5a9dfac6-5c52-46f7-8278-5e2210713be9" +version = "0.2.4" + +[[Pkg]] +deps = ["Artifacts", "Dates", "Downloads", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "Serialization", "TOML", "Tar", "UUIDs", "p7zip_jll"] +uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" + +[[Preferences]] +deps = ["TOML"] +git-tree-sha1 = "00cfd92944ca9c760982747e9a1d0d5d86ab1e5a" +uuid = "21216c6a-2e73-6563-6e65-726566657250" +version = "1.2.2" + +[[Printf]] +deps = ["Unicode"] +uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" + +[[QuadGK]] +deps = ["DataStructures", "LinearAlgebra"] +git-tree-sha1 = 
"78aadffb3efd2155af139781b8a8df1ef279ea39" +uuid = "1fd47b50-473d-5c70-9696-f719f8f3bcdc" +version = "2.4.2" + +[[REPL]] +deps = ["InteractiveUtils", "Markdown", "Sockets", "Unicode"] +uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" + +[[Random]] +deps = ["Serialization"] +uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" + +[[RecipesBase]] +git-tree-sha1 = "44a75aa7a527910ee3d1751d1f0e4148698add9e" +uuid = "3cdcf5f2-1ef4-517c-9805-6587b60abb01" +version = "1.1.2" + +[[Reexport]] +git-tree-sha1 = "45e428421666073eab6f2da5c9d310d99bb12f9b" +uuid = "189a3867-3050-52da-a836-e630ba90ab69" +version = "1.2.2" + +[[Requires]] +deps = ["UUIDs"] +git-tree-sha1 = "4036a3bd08ac7e968e27c203d45f5fff15020621" +uuid = "ae029012-a4dd-5104-9daa-d747884805df" +version = "1.1.3" + +[[SHA]] +uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" + +[[Serialization]] +uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" + +[[SharedArrays]] +deps = ["Distributed", "Mmap", "Random", "Serialization"] +uuid = "1a1011a3-84de-559e-8e89-a11a2f7dc383" + +[[Sockets]] +uuid = "6462fe0b-24de-5631-8697-dd941f90decc" + +[[SparseArrays]] +deps = ["LinearAlgebra", "Random"] +uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" + +[[SparseMatricesCSR]] +deps = ["LinearAlgebra", "SparseArrays", "SuiteSparse"] +git-tree-sha1 = "ad906b39ce5e05ec509495dfc7b38d11ce9ab40b" +uuid = "a0a7dd2c-ebf4-11e9-1f05-cf50bc540ca1" +version = "0.6.5" + +[[SpecialFunctions]] +deps = ["ChainRulesCore", "IrrationalConstants", "LogExpFunctions", "OpenLibm_jll", "OpenSpecFun_jll"] +git-tree-sha1 = "f0bccf98e16759818ffc5d97ac3ebf87eb950150" +uuid = "276daf66-3868-5448-9aa4-cd146d93841b" +version = "1.8.1" + +[[Static]] +deps = ["IfElse"] +git-tree-sha1 = "e7bc80dc93f50857a5d1e3c8121495852f407e6a" +uuid = "aedffcd0-7271-4cad-89d0-dc628f76c6d3" +version = "0.4.0" + +[[StaticArrays]] +deps = ["LinearAlgebra", "Random", "Statistics"] +git-tree-sha1 = "3c76dde64d03699e074ac02eb2e8ba8254d428da" +uuid = "90137ffa-7385-5640-81b9-e52037218182" +version = "1.2.13" + +[[Statistics]] +deps = ["LinearAlgebra", "SparseArrays"] +uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" + +[[StatsAPI]] +git-tree-sha1 = "1958272568dc176a1d881acb797beb909c785510" +uuid = "82ae8749-77ed-4fe6-ae5f-f523153014b0" +version = "1.0.0" + +[[SuiteSparse]] +deps = ["Libdl", "LinearAlgebra", "Serialization", "SparseArrays"] +uuid = "4607b0f0-06f3-5cda-b6b1-a6196a1729e9" + +[[TOML]] +deps = ["Dates"] +uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76" + +[[Tar]] +deps = ["ArgTools", "SHA"] +uuid = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e" + +[[Test]] +deps = ["InteractiveUtils", "Logging", "Random", "Serialization"] +uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" + +[[TranscodingStreams]] +deps = ["Random", "Test"] +git-tree-sha1 = "216b95ea110b5972db65aa90f88d8d89dcb8851c" +uuid = "3bb67fe8-82b1-5028-8e26-92a6c54297fa" +version = "0.9.6" + +[[UUIDs]] +deps = ["Random", "SHA"] +uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" + +[[UnPack]] +git-tree-sha1 = "387c1f73762231e86e0c9c5443ce3b4a0a9a0c2b" +uuid = "3a884ed6-31ef-47d7-9d2a-63182c4928ed" +version = "1.0.2" + +[[Unicode]] +uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" + +[[WriteVTK]] +deps = ["Base64", "CodecZlib", "FillArrays", "LightXML", "TranscodingStreams"] +git-tree-sha1 = "3c6dd58eac1c7941b853790edf30dbfe8a6f57df" +uuid = "64499a7a-5c06-52f2-abe2-ccb03c286192" +version = "1.12.0" + +[[XML2_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Libiconv_jll", "Pkg", "Zlib_jll"] +git-tree-sha1 = "1acf5bdf07aa0907e0a37d3718bb88d4b687b74a" +uuid = 
"02c8fc9c-b97f-50b9-bbe4-9be30ff0a78a" +version = "2.9.12+0" + +[[Zlib_jll]] +deps = ["Libdl"] +uuid = "83775a58-1f1d-513f-b197-d71354ab007a" + +[[nghttp2_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "8e850ede-7688-5339-a07c-302acd2aaf8d" + +[[p7zip_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "3f19e933-33d8-53b3-aaab-bd5110c3b7a0" diff --git a/Project.toml b/Project.toml index 8b2ad2b..aa79dce 100644 --- a/Project.toml +++ b/Project.toml @@ -5,6 +5,7 @@ version = "0.3.0" [deps] Gridap = "56d4f2e9-7ea1-5844-9cf6-b9c51ca7ce8e" +GridapDistributed = "f9701e48-63b3-45aa-9a63-9bc6c271f355" Libdl = "8f399da3-3557-5675-b5ff-fb832c97cbdb" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" MPI = "da04e1cc-30fd-572f-bb4f-1f8673147195" From 0aa527d3dd7167083be3472004cbc09fca5fe67b Mon Sep 17 00:00:00 2001 From: amartin Date: Fri, 29 Oct 2021 00:45:07 +1100 Subject: [PATCH 08/14] PLaplacian MPI + PETSc driver working --- src/PETScNonlinearSolvers.jl | 21 +++++++++-- src/PartitionedArrays.jl | 40 ++++++++++++-------- test/PETScArraysTests.jl | 2 +- test/PETScAssemblyTests.jl | 2 +- test/PLaplacianDriver.jl | 60 ++++++++++++++++++++++++++++++ test/PLaplacianDriver_mpi.jl | 5 +++ test/PartitionedArraysTests_mpi.jl | 14 +------ test/mpiexec.jl | 11 ++++++ 8 files changed, 120 insertions(+), 35 deletions(-) create mode 100644 test/PLaplacianDriver.jl create mode 100644 test/PLaplacianDriver_mpi.jl create mode 100644 test/mpiexec.jl diff --git a/src/PETScNonlinearSolvers.jl b/src/PETScNonlinearSolvers.jl index 48e615b..823c961 100644 --- a/src/PETScNonlinearSolvers.jl +++ b/src/PETScNonlinearSolvers.jl @@ -129,9 +129,21 @@ end function _setup_cache(x,nls::PETScNonlinearSolver,op) res_julia_vec, jac_julia_mat_A = residual_and_jacobian(op,x) res_petsc_vec = PETScVector(res_julia_vec) - x_petsc_vec = PETScVector(x) jac_petsc_mat_A = PETScMatrix(jac_julia_mat_A) - x_julia_vec = copy(res_julia_vec) + + # In a parallel MPI context, x is a vector with a data layout typically different from + # the one of res_julia_vec. On the one hand, x holds the free dof values of a FE + # Function, and thus has the data layout of the FE space (i.e., local DOFs + # include all DOFs touched by local cells, i.e., owned and ghost cells). + # On the other hand, res_petsc_vec has the data layout of the rows of the + # distributed linear system (e.g., local DoFs only include those touched for owned + # cells/facets during assembly, assuming the SubAssembledRows strategy). + # The following lines of code generate a version of x, namely, x_julia_vec, with the + # same data layout as the columns of jac_julia_mat_A, but the contents of x (the owned dofs, and # a subset of the ghost dofs). 
 src/PETScNonlinearSolvers.jl       | 21 +++++++++--
 src/PartitionedArrays.jl           | 40 ++++++++++++--------
 test/PETScArraysTests.jl           |  2 +-
 test/PETScAssemblyTests.jl         |  2 +-
 test/PLaplacianDriver.jl           | 60 ++++++++++++++++++++++++++++++
 test/PLaplacianDriver_mpi.jl       |  5 +++
 test/PartitionedArraysTests_mpi.jl | 14 +------
 test/mpiexec.jl                    | 11 ++++++
 8 files changed, 120 insertions(+), 35 deletions(-)
 create mode 100644 test/PLaplacianDriver.jl
 create mode 100644 test/PLaplacianDriver_mpi.jl
 create mode 100644 test/mpiexec.jl

diff --git a/src/PETScNonlinearSolvers.jl b/src/PETScNonlinearSolvers.jl
index 48e615b..823c961 100644
--- a/src/PETScNonlinearSolvers.jl
+++ b/src/PETScNonlinearSolvers.jl
@@ -129,9 +129,21 @@ end
 function _setup_cache(x,nls::PETScNonlinearSolver,op)
   res_julia_vec, jac_julia_mat_A = residual_and_jacobian(op,x)
   res_petsc_vec = PETScVector(res_julia_vec)
-  x_petsc_vec   = PETScVector(x)
   jac_petsc_mat_A = PETScMatrix(jac_julia_mat_A)
-  x_julia_vec   = copy(res_julia_vec)
+
+  # In a parallel MPI context, x is a vector with a data layout typically different from
+  # the one of res_julia_vec. On the one hand, x holds the free dof values of a FE
+  # Function, and thus has the data layout of the FE space (i.e., local DOFs
+  # include all DOFs touched by local cells, i.e., owned and ghost cells).
+  # On the other hand, res_petsc_vec has the data layout of the rows of the
+  # distributed linear system (e.g., local DoFs only include those touched for owned
+  # cells/facets during assembly, assuming the SubAssembledRows strategy).
+  # The following lines of code generate a version of x, namely, x_julia_vec, with the
+  # same data layout as the columns of jac_julia_mat_A, but with the contents of x (the owned dofs, and
+  # a subset of the ghost dofs).
+  x_julia_vec = similar(res_julia_vec,eltype(res_julia_vec),(axes(jac_julia_mat_A)[2],))
+  copy!(x_julia_vec,x)
+  exchange!(x_julia_vec)
+  x_petsc_vec = PETScVector(x_julia_vec)
   PETScNonlinearSolverCache(op, x_julia_vec,res_julia_vec,
                             jac_julia_mat_A,jac_julia_mat_A,
                             x_petsc_vec,res_petsc_vec,
                             jac_petsc_mat_A, jac_petsc_mat_A)
@@ -147,6 +159,7 @@ function Algebra.solve!(x::T,
   @assert cache.op === op
   @check_error_code PETSC.SNESSolve(nls.snes[],C_NULL,cache.x_petsc_vec.vec[])
   copy!(x,cache.x_petsc_vec.vec[])
+  exchange!(x)
   cache
 end
 
@@ -157,7 +170,7 @@ function Algebra.solve!(x::AbstractVector,nls::PETScNonlinearSolver,op::Nonlinea
   _set_petsc_residual_function!(nls,cache)
   _set_petsc_jacobian_function!(nls,cache)
   @check_error_code PETSC.SNESSolve(nls.snes[],C_NULL,cache.x_petsc_vec.vec[])
   copy!(x,cache.x_petsc_vec.vec[])
+  #@check_error_code PETSC.VecView(cache.x_petsc_vec.vec[],PETSC.@PETSC_VIEWER_STDOUT_WORLD)
+  exchange!(x)
   cache
 end
-
-# ===== TO DO: MOVE Base.copy! overloads below TO APPROPRIATE SOURCE FILES
diff --git a/src/PartitionedArrays.jl b/src/PartitionedArrays.jl
index b504e8a..eed0b3b 100644
--- a/src/PartitionedArrays.jl
+++ b/src/PartitionedArrays.jl
@@ -83,13 +83,15 @@ end
 
 function Base.copy!(pvec::PVector,petscvec::PETScVector)
   if get_backend(pvec.values) == mpi
-    map_parts(pvec.values) do values
+    map_parts(get_part_ids(pvec.values),pvec.values,pvec.rows.partition) do part, values, indices
       lg=get_local_oh_vector(petscvec)
       if (isa(lg,PETScVector)) # petsc_vec is a ghosted vector
+        # Only copying owned DoFs. This should be followed by
+        # an exchange if the ghost DoFs of pvec are to be consumed
        @assert pvec.rows.ghost
        lx=get_local_vector(lg)
-       @assert length(lx)==length(values)
-       values .= lx
+       vvalues=view(values,indices.oid_to_lid)
+       vvalues .= lx[1:num_oids(indices)]
        restore_local_vector!(lx,lg)
        GridapPETSc.Finalize(lg)
       else # petsc_vec is NOT a ghosted vector
@@ -107,13 +109,17 @@ end
 
 function Base.copy!(petscvec::PETScVector,pvec::PVector)
   if get_backend(pvec.values) == mpi
-    map_parts(pvec.values) do values
+    map_parts(pvec.values,pvec.rows.partition) do values, indices
       lg=get_local_oh_vector(petscvec)
       if (isa(lg,PETScVector)) # petscvec is a ghosted vector
-       @assert pvec.rows.ghost
-       lx=get_local_vector(lg)
-       @assert length(lx)==length(values)
-       lx .= values
+       lx=get_local_vector(lg)
+       if (pvec.rows.ghost) # pvec is a ghosted vector
+         @assert length(lx)==length(values)
+         lx .= values
+       else
+         vvalues=view(values,indices.oid_to_lid)
+         lx[1:num_oids(indices)] .= view(values,indices.oid_to_lid)
+       end
        restore_local_vector!(lx,lg)
        GridapPETSc.Finalize(lg)
       else # petscvec is NOT a ghosted vector
        @assert !pvec.rows.ghost
        @assert length(lg)==length(values)
        lg .= values
        restore_local_vector!(lg,petscvec)
@@ -171,7 +177,8 @@ function PETScMatrix(a::PSparseMatrix,::MPIBackend)
 end
 
 function Base.copy!(petscmat::PETScMatrix,mat::PSparseMatrix)
-  map_parts(mat.values,mat.rows.partition,mat.cols.partition) do lmat, rdofs, cdofs
+  parts=get_part_ids(mat.values)
+  map_parts(parts, mat.values,mat.rows.partition,mat.cols.partition) do part, lmat, rdofs, cdofs
     Tm = SparseMatrixCSR{0,PetscScalar,PetscInt}
     csr = convert(Tm,lmat)
     ia = csr.rowptr
@@ -182,24 +189,25 @@ function Base.copy!(petscmat::PETScMatrix,mat::PSparseMatrix)
     maxnnz = maximum( ia[i+1]-ia[i] for i=1:m )
     row = Vector{PetscInt}(undef,1)
     cols = Vector{PetscInt}(undef,maxnnz)
-    for i=1:m
-      row[1]=PetscInt(rdofs.lid_to_gid[i]-1)
+    for i=1:num_oids(rdofs)
+      lid=rdofs.oid_to_lid[i]
+      row[1]=PetscInt(rdofs.lid_to_gid[lid]-1)
       current=1
-      for j=ia[i]+1:ia[i+1]
+      for j=ia[lid]+1:ia[lid+1]
         col=ja[j]+1
-        cols[current]=PetscInt(cdofs.lid_to_gid[ja[j]+1]-1)
+        cols[current]=PetscInt(cdofs.lid_to_gid[col]-1)
        current=current+1
       end
-      vals = view(a,ia[i]+1:ia[i+1])
+      vals = view(a,ia[lid]+1:ia[lid+1])
      PETSC.MatSetValues(petscmat.mat[],
                         PetscInt(1),
                         row,
-                        ia[i+1]-ia[i],
+                        ia[lid+1]-ia[lid],
                         cols,
                         vals,
                         PETSC.INSERT_VALUES)
     end
    end
    @check_error_code PETSC.MatAssemblyBegin(petscmat.mat[], PETSC.MAT_FINAL_ASSEMBLY)
-   @check_error_code PETSC.MatAssemblyEnd(petscmat.mat[] , PETSC.MAT_FINAL_ASSEMBLY)
+   @check_error_code PETSC.MatAssemblyEnd(petscmat.mat[], PETSC.MAT_FINAL_ASSEMBLY)
 end
diff --git a/test/PETScArraysTests.jl b/test/PETScArraysTests.jl
index 77f5faf..2a08165 100644
--- a/test/PETScArraysTests.jl
+++ b/test/PETScArraysTests.jl
@@ -1,4 +1,4 @@
-module PETScVectorsTests
+module PETScArraysTests
 
 using GridapPETSc
 using Test
diff --git a/test/PETScAssemblyTests.jl b/test/PETScAssemblyTests.jl
index 9285df0..2de54bc 100644
--- a/test/PETScAssemblyTests.jl
+++ b/test/PETScAssemblyTests.jl
@@ -1,4 +1,4 @@
-module PETScArraysTests
+module PETScAssemblyTests
 
 using Test
 using Gridap
diff --git a/test/PLaplacianDriver.jl b/test/PLaplacianDriver.jl
new file mode 100644
index 0000000..ff200cc
--- /dev/null
+++ b/test/PLaplacianDriver.jl
@@ -0,0 +1,60 @@
+using Gridap
+using Gridap.Algebra
+using GridapDistributed
+using PartitionedArrays
+using GridapPETSc
+using Test
+
+function main(parts)
+  options = "-snes_type newtonls -snes_linesearch_type basic -snes_linesearch_damping 1.0 -snes_rtol 1.0e-14 -snes_atol 0.0 -snes_monitor -pc_type jacobi -ksp_type gmres -ksp_monitor -snes_converged_reason"
+  GridapPETSc.Init(args=split(options))
+
+  main(parts,FullyAssembledRows())
+  main(parts,SubAssembledRows())
+
+  GridapPETSc.Finalize()
+end
+
+function main(parts,strategy)
+
+  domain = (0,4,0,4)
+  cells = (4,4)
+  model = CartesianDiscreteModel(parts,domain,cells)
+
+  k = 1
+  u((x,y)) = (x+y)^k
+  σ(∇u) = (1.0+∇u⋅∇u)*∇u
+  dσ(∇du,∇u) = 2*∇u⋅∇du + (1.0+∇u⋅∇u)*∇du
+  f(x) = -divergence(y->σ(∇(u,y)),x)
+
+  Ω = Triangulation(strategy,model)
+  dΩ = Measure(Ω,2*k)
+  r(u,v) = ∫( ∇(v)⋅(σ∘∇(u)) - v*f )dΩ
+  j(u,du,v) = ∫( ∇(v)⋅(dσ∘(∇(du),∇(u))) )dΩ
+
+  reffe = ReferenceFE(lagrangian,Float64,k)
+  V = TestFESpace(model,reffe,dirichlet_tags="boundary")
+  U = TrialFESpace(u,V)
+
+  op = FEOperator(r,j,U,V,strategy)
+
+  uh = zero(U)
+  # b,A = residual_and_jacobian(op,uh)
+  # _A = copy(A)
+  # _b = copy(b)
+  # residual_and_jacobian!(_b,_A,op,uh)
+  # @test (norm(_b-b)+1) ≈ 1
+  # x = similar(b,Float64,axes(A,2))
+  # fill!(x,1)
+  # @test (norm(A*x-_A*x)+1) ≈ 1
+
+  nls = PETScNonlinearSolver()
+  solver = FESolver(nls)
+  uh = solve(solver,op)
+
+  Ωo = Triangulation(model)
+  dΩo = Measure(Ωo,2*k)
+  eh = u - uh
+  @test sqrt(sum(∫( abs2(eh) )dΩo)) < 1.0e-9
+
+end
diff --git a/test/PLaplacianDriver_mpi.jl b/test/PLaplacianDriver_mpi.jl
new file mode 100644
index 0000000..76134ea
--- /dev/null
+++ b/test/PLaplacianDriver_mpi.jl
@@ -0,0 +1,5 @@
+
+include("PLaplacianDriver.jl")
+
+nparts = (2,1)
+prun(main,mpi,nparts)
diff --git a/test/PartitionedArraysTests_mpi.jl b/test/PartitionedArraysTests_mpi.jl
index e88af3d..2966841 100644
--- a/test/PartitionedArraysTests_mpi.jl
+++ b/test/PartitionedArraysTests_mpi.jl
@@ -1,18 +1,6 @@
 module PartitionedArraysTests_mpi
 
-using MPI
-using Test
-
-function run_mpi_driver(;procs,file)
-  mpidir = @__DIR__
-  testdir = joinpath(mpidir,".")
-  repodir = joinpath(testdir,"..")
-  mpiexec() do cmd
-    run(`$cmd -n $procs $(Base.julia_cmd()) --project=$repodir $(joinpath(mpidir,file))`)
-    @test true
-  end
-end
-
+include("mpiexec.jl")
 run_mpi_driver(procs=3,file="PartitionedArraysTests_mpi_driver.jl")
 
 end # module
diff --git a/test/mpiexec.jl b/test/mpiexec.jl
new file mode 100644
index 0000000..86301e7
--- /dev/null
+++ b/test/mpiexec.jl
@@ -0,0 +1,11 @@
+using MPI
+using Test
+function run_mpi_driver(;procs,file)
+  mpidir = @__DIR__
+  testdir = joinpath(mpidir,"..")
+  repodir = joinpath(testdir,"..")
+  mpiexec() do cmd
+    run(`$cmd -n $procs $(Base.julia_cmd()) --project=$repodir $(joinpath(mpidir,file))`)
+    @test true
+  end
+end

From c9585d7ebc40300ff6de8208bc2bab45b9af99a5 Mon Sep 17 00:00:00 2001
From: amartin
Date: Fri, 29 Oct 2021 02:00:23 +1100
Subject: [PATCH 09/14] Tidying up the test source directory

---
 test/PLaplacianDriver_mpi.jl                  |  5 ----
 ...PLaplacianDriver.jl => PLaplacianTests.jl} |  0
 test/PartitionedArraysTests.jl                | 13 ++++++----
 test/PartitionedArraysTests_mpi.jl            |  6 -----
 test/PartitionedArraysTests_sequential.jl     |  8 ------
 test/mpi/PLaplacianTests.jl                   |  3 +++
 test/mpi/PLaplacianTestsRun.jl                |  4 +++
 .../PartitionedArraysTests.jl}                |  5 +---
 test/mpi/PartitionedArraysTestsRun.jl         |  4 +++
 test/{ => mpi}/mpiexec.jl                     |  0
 test/mpi/runtests.jl                          |  5 ++++
 test/runtests.jl                              | 19 ++------------
 test/{ => sequential}/ElasticityDriver.jl     |  0
 test/{ => sequential}/PETSCTests.jl           |  0
 test/{ => sequential}/PETScArraysTests.jl     |  0
 test/{ => sequential}/PETScAssemblyTests.jl   |  0
 .../PETScLinearSolversTests.jl                |  0
 .../PETScNonlinearSolversTests.jl             |  0
 test/sequential/PartitionedArraysTests.jl     |  5 ++++
 test/{ => sequential}/PoissonDriver.jl        |  0
 test/sequential/runtests.jl                   | 25 +++++++++++++++++++
 21 files changed, 57 insertions(+), 45 deletions(-)
 delete mode 100644 test/PLaplacianDriver_mpi.jl
 rename test/{PLaplacianDriver.jl => PLaplacianTests.jl} (100%)
 delete mode 100644 test/PartitionedArraysTests_mpi.jl
 delete mode 100644 test/PartitionedArraysTests_sequential.jl
 create mode 100644 test/mpi/PLaplacianTests.jl
 create mode 100644 test/mpi/PLaplacianTestsRun.jl
 rename test/{PartitionedArraysTests_mpi_driver.jl => mpi/PartitionedArraysTests.jl} (53%)
 create mode 100644 test/mpi/PartitionedArraysTestsRun.jl
 rename test/{ => mpi}/mpiexec.jl (100%)
 create mode 100644 test/mpi/runtests.jl
 rename test/{ => sequential}/ElasticityDriver.jl (100%)
 rename test/{ => sequential}/PETSCTests.jl (100%)
 rename test/{ => sequential}/PETScArraysTests.jl (100%)
 rename test/{ => sequential}/PETScAssemblyTests.jl (100%)
 rename test/{ => sequential}/PETScLinearSolversTests.jl (100%)
 rename test/{ => sequential}/PETScNonlinearSolversTests.jl (100%)
 create mode 100644 test/sequential/PartitionedArraysTests.jl
 rename test/{ => sequential}/PoissonDriver.jl (100%)
 create mode 100644 test/sequential/runtests.jl

diff --git a/test/PLaplacianDriver_mpi.jl b/test/PLaplacianDriver_mpi.jl
deleted file mode 100644
index 76134ea..0000000
--- a/test/PLaplacianDriver_mpi.jl
+++ /dev/null
@@ -1,5 +0,0 @@
-
-include("PLaplacianDriver.jl")
-
-nparts = (2,1)
-prun(main,mpi,nparts)
diff --git a/test/PLaplacianDriver.jl b/test/PLaplacianTests.jl
similarity index 100%
rename from test/PLaplacianDriver.jl
rename to test/PLaplacianTests.jl
diff --git a/test/PartitionedArraysTests.jl b/test/PartitionedArraysTests.jl
index b9834b4..2c03656 100644
--- a/test/PartitionedArraysTests.jl
+++ b/test/PartitionedArraysTests.jl
@@ -76,34 +76,37 @@ function partitioned_tests(parts)
   ngids = 7
   ids = PRange(parts,ngids,noids,firstgid,hid_to_gid,hid_to_part)
   values = map_parts(ids.partition) do ids
+    println(ids.lid_to_gid)
     10.0*ids.lid_to_gid
   end
 
-  function test_vectors(v::PVector,x::PETScVector)
+  function test_vectors(v::PVector,x::PETScVector,ids)
     PETSC.@check_error_code PETSC.VecView(x.vec[],C_NULL)
     u = PVector(x,ids)
     exchange!(u)
     map_parts(u.values,v.values) do u,v
-      @test u ≈ v
+      @test u == v
     end
   end
 
   v = PVector(values,ids)
   x = PETScVector(v)
-  test_vectors(v,x)
+  test_get_local_vector(v,x)
+  test_vectors(v,x,ids)
 
   if (get_backend(v.values)==mpi)
     # Copy v into v1 to circumvent (potential) aliasing of v and x
     v1=copy(v)
     fill!(v1,zero(eltype(v)))
     copy!(v1,x)
-    test_vectors(v1,x)
+    exchange!(v1)
+    test_vectors(v1,x,ids)
 
     # Copy x into x1 to circumvent (potential) aliasing of v and x
     x1=copy(x)
     fill!(x1,PetscScalar(0.0))
     copy!(x1,v)
-    test_vectors(v,x1)
+    test_vectors(v,x1,ids)
     GridapPETSc.Finalize(x1)
   end
diff --git a/test/PartitionedArraysTests_mpi.jl b/test/PartitionedArraysTests_mpi.jl
deleted file mode 100644
index 2966841..0000000
--- a/test/PartitionedArraysTests_mpi.jl
+++ /dev/null
@@ -1,6 +0,0 @@
-module PartitionedArraysTests_mpi
-
-include("mpiexec.jl")
-run_mpi_driver(procs=3,file="PartitionedArraysTests_mpi_driver.jl")
-
-end # module
diff --git a/test/PartitionedArraysTests_sequential.jl b/test/PartitionedArraysTests_sequential.jl
deleted file mode 100644
index d21e602..0000000
--- a/test/PartitionedArraysTests_sequential.jl
+++ /dev/null
@@ -1,8 +0,0 @@
-module PartitionedArraysTests_sequential
-
-include("PartitionedArraysTests.jl")
-
-nparts = 3
-prun(partitioned_tests,sequential,nparts)
-
-end # module
diff --git a/test/mpi/PLaplacianTests.jl b/test/mpi/PLaplacianTests.jl
new file mode 100644
index 0000000..d9455f6
--- /dev/null
+++ b/test/mpi/PLaplacianTests.jl
@@ -0,0 +1,3 @@
+include("../PLaplacianTests.jl")
+nparts = (2,2)
+prun(main,mpi,nparts)
diff --git a/test/mpi/PLaplacianTestsRun.jl b/test/mpi/PLaplacianTestsRun.jl
new file mode 100644
index 0000000..6a107f9
--- /dev/null
+++ b/test/mpi/PLaplacianTestsRun.jl
@@ -0,0 +1,4 @@
+module PLaplacianTestsRun
+  include("mpiexec.jl")
+  run_mpi_driver(procs=4,file="PLaplacianTests.jl")
+end # module
diff --git a/test/PartitionedArraysTests_mpi_driver.jl b/test/mpi/PartitionedArraysTests.jl
similarity index 53%
rename from test/PartitionedArraysTests_mpi_driver.jl
rename to test/mpi/PartitionedArraysTests.jl
index d886e72..938685f 100644
--- a/test/PartitionedArraysTests_mpi_driver.jl
+++ b/test/mpi/PartitionedArraysTests.jl
@@ -1,6 +1,3 @@
-
-
-include("PartitionedArraysTests.jl")
-
+include("../PartitionedArraysTests.jl")
 nparts = 3
 prun(partitioned_tests,mpi,nparts)
diff --git a/test/mpi/PartitionedArraysTestsRun.jl b/test/mpi/PartitionedArraysTestsRun.jl
new file mode 100644
index 0000000..1d86bc4
--- /dev/null
+++ b/test/mpi/PartitionedArraysTestsRun.jl
@@ -0,0 +1,4 @@
+module PartitionedArraysTestsRun
+  include("mpiexec.jl")
+  run_mpi_driver(procs=3,file="PartitionedArraysTests.jl")
+end # module
diff --git a/test/mpiexec.jl b/test/mpi/mpiexec.jl
similarity index 100%
rename from test/mpiexec.jl
rename to test/mpi/mpiexec.jl
diff --git a/test/mpi/runtests.jl b/test/mpi/runtests.jl
new file mode 100644
index 0000000..8ec54bc
--- /dev/null
+++ b/test/mpi/runtests.jl
@@ -0,0 +1,5 @@
+module GridapPETScMPITests
+using Test
+@time @testset "PartitionedArrays" begin include("PartitionedArraysTestsRun.jl") end
+@time @testset "PLaplacianTests" begin include("PLaplacianTestsRun.jl") end
+end # module
diff --git a/test/runtests.jl b/test/runtests.jl
index 7689270..34b3619 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -2,22 +2,7 @@ module GridapPETScTests
 
 using Test
 
-@time @testset "PETSC" begin include("PETSCTests.jl") end
-
-@time @testset "PETScArrays" begin include("PETScArraysTests.jl") end
-
-@time @testset "PartitionedArrays (sequential)" begin include("PartitionedArraysTests_sequential.jl") end
-
-@time @testset "PartitionedArrays (mpi)" begin include("PartitionedArraysTests_mpi.jl") end
-
-@time @testset "PETScLinearSolvers" begin include("PETScLinearSolversTests.jl") end
-
-@time @testset "PETScNonLinearSolvers" begin include("PETScNonlinearSolversTests.jl") end
-
-@time @testset "PETScAssembly" begin include("PETScAssemblyTests.jl") end
-
-@time @testset "PoissonDriver" begin include("PoissonDriver.jl") end
-
-@time @testset "ElasticityDriver" begin include("ElasticityDriver.jl") end
+@time @testset "SEQUENTIAL" begin include("sequential/runtests.jl") end
+@time @testset "MPI" begin include("mpi/runtests.jl") end
 
 end # module
diff --git a/test/ElasticityDriver.jl b/test/sequential/ElasticityDriver.jl
similarity index 100%
rename from test/ElasticityDriver.jl
rename to test/sequential/ElasticityDriver.jl
diff --git a/test/PETSCTests.jl b/test/sequential/PETSCTests.jl
similarity index 100%
rename from test/PETSCTests.jl
rename to test/sequential/PETSCTests.jl
diff --git a/test/PETScArraysTests.jl b/test/sequential/PETScArraysTests.jl
similarity index 100%
rename from test/PETScArraysTests.jl
rename to test/sequential/PETScArraysTests.jl
diff --git a/test/PETScAssemblyTests.jl b/test/sequential/PETScAssemblyTests.jl
similarity index 100%
rename from test/PETScAssemblyTests.jl
rename to test/sequential/PETScAssemblyTests.jl
diff --git a/test/PETScLinearSolversTests.jl b/test/sequential/PETScLinearSolversTests.jl
similarity index 100%
rename from test/PETScLinearSolversTests.jl
rename to test/sequential/PETScLinearSolversTests.jl
diff --git a/test/PETScNonlinearSolversTests.jl b/test/sequential/PETScNonlinearSolversTests.jl
similarity index 100%
rename from test/PETScNonlinearSolversTests.jl
rename to test/sequential/PETScNonlinearSolversTests.jl
diff --git a/test/sequential/PartitionedArraysTests.jl b/test/sequential/PartitionedArraysTests.jl
new file mode 100644
index 0000000..615f1f9
--- /dev/null
+++ b/test/sequential/PartitionedArraysTests.jl
@@ -0,0 +1,5 @@
+module PartitionedArraysTests
+include("../PartitionedArraysTests.jl")
+nparts = 3
+prun(partitioned_tests,sequential,nparts)
+end # module
diff --git a/test/PoissonDriver.jl b/test/sequential/PoissonDriver.jl
similarity index 100%
rename from test/PoissonDriver.jl
rename to test/sequential/PoissonDriver.jl
diff --git a/test/sequential/runtests.jl b/test/sequential/runtests.jl
new file mode 100644
index 0000000..29ed044
--- /dev/null
+++ b/test/sequential/runtests.jl
@@ -0,0 +1,25 @@
+module GridapPETScSequentialTests
+
+using Test
+
+@time @testset "PETSC" begin include("PETSCTests.jl") end
+
+@time @testset "PETScArrays" begin include("PETScArraysTests.jl") end
+
+@time @testset "PartitionedArrays (sequential)" begin include("PartitionedArraysTests.jl") end
+
+@time @testset "PETScLinearSolvers" begin include("PETScLinearSolversTests.jl") end
+
+@time @testset "PETScNonLinearSolvers" begin include("PETScNonlinearSolversTests.jl") end
+
+@time @testset "PETScAssembly" begin include("PETScAssemblyTests.jl") end
+
+@time @testset "PoissonDriver" begin include("PoissonDriver.jl") end
+
+@time @testset "ElasticityDriver" begin include("ElasticityDriver.jl") end
+
+end # module
+
+
+
+

From f7c50d701e27b21f6f68fbbebfcae23fef2cf2ad Mon Sep 17 00:00:00 2001
From: amartin
Date: Fri, 29 Oct 2021 13:23:46 +1100
Subject: [PATCH 10/14] Fixing failing tests

---
 src/PETScNonlinearSolvers.jl | 32 +++++++++++++++++++++-----------
 1 file changed, 21 insertions(+), 11 deletions(-)

diff --git a/src/PETScNonlinearSolvers.jl b/src/PETScNonlinearSolvers.jl
index 823c961..ab91a35 100644
--- a/src/PETScNonlinearSolvers.jl
+++ b/src/PETScNonlinearSolvers.jl
@@ -125,7 +125,6 @@ function _set_petsc_jacobian_function!(nls::PETScNonlinearSolver, cache)
   PETSC.SNESSetJacobian(nls.snes[],cache.jac_petsc_mat_A.mat[],cache.jac_petsc_mat_A.mat[],fptr,ctx)
 end
 
-
 function _setup_cache(x,nls::PETScNonlinearSolver,op)
   res_julia_vec, jac_julia_mat_A = residual_and_jacobian(op,x)
   res_petsc_vec = PETScVector(res_julia_vec)
@@ -136,21 +135,36 @@ function _setup_cache(x,nls::PETScNonlinearSolver,op)
   # Function, and thus has the data layout of the FE space (i.e., local DOFs
   # include all DOFs touched by local cells, i.e., owned and ghost cells).
   # On the other hand, res_petsc_vec has the data layout of the rows of the
-  # distributed linear system (e.g., local DoFs only include those touched for owned
+  # distributed linear system (e.g., local DoFs only include those touched from owned
   # cells/facets during assembly, assuming the SubAssembledRows strategy).
   # The following lines of code generate a version of x, namely, x_julia_vec, with the
-  # same data layout as the columns of jac_julia_mat_A, but with the contents of x (the owned dofs, and
-  # a subset of the ghost dofs).
+  # same data layout as the columns of jac_julia_mat_A, but with the contents of x
+  # (for the owned dof values).
   x_julia_vec = similar(res_julia_vec,eltype(res_julia_vec),(axes(jac_julia_mat_A)[2],))
   copy!(x_julia_vec,x)
-  exchange!(x_julia_vec)
   x_petsc_vec = PETScVector(x_julia_vec)
-
   PETScNonlinearSolverCache(op, x_julia_vec,res_julia_vec,
                             jac_julia_mat_A,jac_julia_mat_A,
                             x_petsc_vec,res_petsc_vec,
                             jac_petsc_mat_A, jac_petsc_mat_A)
 end
 
+# Helper private functions to implement the solve! methods below.
+# They allow executing the solve! methods in a serial context, i.e.,
+# whenever x is a plain AbstractVector instead of a PVector.
+function _myexchange!(x::AbstractVector)
+  x
+end
+function _myexchange!(x::PVector)
+  exchange!(x)
+end
+
+function _copy_and_exchange!(a::AbstractVector,b::PETScVector)
+  copy!(a,b.vec[])
+  _myexchange!(a)
+end
+
+
 function Algebra.solve!(x::T,
                         nls::PETScNonlinearSolver,
                         op::NonlinearOperator,
@@ -158,19 +172,15 @@ function Algebra.solve!(x::T,
   @assert cache.op === op
   @check_error_code PETSC.SNESSolve(nls.snes[],C_NULL,cache.x_petsc_vec.vec[])
-  copy!(x,cache.x_petsc_vec.vec[])
-  exchange!(x)
+  _copy_and_exchange!(x,cache.x_petsc_vec)
   cache
 end
 
-
 function Algebra.solve!(x::AbstractVector,nls::PETScNonlinearSolver,op::NonlinearOperator)
   cache=_setup_cache(x,nls,op)
   _set_petsc_residual_function!(nls,cache)
   _set_petsc_jacobian_function!(nls,cache)
   @check_error_code PETSC.SNESSolve(nls.snes[],C_NULL,cache.x_petsc_vec.vec[])
-  copy!(x,cache.x_petsc_vec.vec[])
-  #@check_error_code PETSC.VecView(cache.x_petsc_vec.vec[],PETSC.@PETSC_VIEWER_STDOUT_WORLD)
-  exchange!(x)
+  _copy_and_exchange!(x,cache.x_petsc_vec)
   cache
 end

From addd58638ab7df1f734e9c32dfbcca170a90a46b Mon Sep 17 00:00:00 2001
From: amartin
Date: Fri, 29 Oct 2021 13:39:45 +1100
Subject: [PATCH 11/14] Relaxing == to \approx in a test with floats (failed
 on GitHub Actions CI)

---
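Notes: == on two floating-point norms is a bit-for-bit comparison, and the two
norms here are computed by different code paths (PETSc vs. Julia), so exact
equality is not guaranteed on every platform. isapprox (≈) compares up to a
relative tolerance instead; a minimal illustration:

  a = 0.1 + 0.2  # 0.30000000000000004
  a == 0.3       # false: exact comparison of floats
  a ≈ 0.3        # true: isapprox, default rtol = sqrt(eps())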
 test/sequential/PETScArraysTests.jl | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/test/sequential/PETScArraysTests.jl b/test/sequential/PETScArraysTests.jl
index 2a08165..676ee41 100644
--- a/test/sequential/PETScArraysTests.jl
+++ b/test/sequential/PETScArraysTests.jl
@@ -80,7 +80,7 @@ GridapPETSc.with(args=split(options)) do
     aj = rand(PetscScalar,n)
     ap = convert(PETScVector,aj)
     @test ap == aj
-    @test norm(ap) == norm(aj)
+    @test norm(ap) ≈ norm(aj)
     @test ap+2*ap == ap+2*ap
     @test typeof(ap+2*ap) == PETScVector
     @test ap-2*ap == ap-2*ap

From e9c56e1dff283a0bfdf31be1f36f81501213f149 Mon Sep 17 00:00:00 2001
From: amartin
Date: Fri, 29 Oct 2021 15:54:57 +1100
Subject: [PATCH 12/14] * Using convert instead of PETScVector()/PETScMatrix()
 constructors to transform PArrays objects into PETSc objects.
 * Major tidying up and detailed comments added to the new Base.copy!
 methods.

---
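Notes: after this patch the PArrays-to-PETSc direction goes through
Base.convert, mirroring the convert methods that already exist for serial
arrays. A sketch of the resulting API, assuming a PVector `v` and a
PSparseMatrix `A` built as in test/PartitionedArraysTests.jl:

  x = convert(PETScVector,v)  # was PETScVector(v)
  B = convert(PETScMatrix,A)  # was PETScMatrix(A)
  copy!(v,x)                  # copies the owned entries back; follow with
  exchange!(v)                # an exchange if ghost entries are to be consumed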
 src/PETScNonlinearSolvers.jl   |  8 ++--
 src/PartitionedArrays.jl       | 79 ++++++++++++++++++++++------------
 test/PartitionedArraysTests.jl |  6 +--
 3 files changed, 58 insertions(+), 35 deletions(-)

diff --git a/src/PETScNonlinearSolvers.jl b/src/PETScNonlinearSolvers.jl
index ab91a35..4a0eb4c 100644
--- a/src/PETScNonlinearSolvers.jl
+++ b/src/PETScNonlinearSolvers.jl
@@ -125,10 +125,10 @@ function _set_petsc_jacobian_function!(nls::PETScNonlinearSolver, cache)
   PETSC.SNESSetJacobian(nls.snes[],cache.jac_petsc_mat_A.mat[],cache.jac_petsc_mat_A.mat[],fptr,ctx)
 end
 
-function _setup_cache(x,nls::PETScNonlinearSolver,op)
+function _setup_cache(x::AbstractVector,nls::PETScNonlinearSolver,op::NonlinearOperator)
   res_julia_vec, jac_julia_mat_A = residual_and_jacobian(op,x)
-  res_petsc_vec = PETScVector(res_julia_vec)
-  jac_petsc_mat_A = PETScMatrix(jac_julia_mat_A)
+  res_petsc_vec = convert(PETScVector,res_julia_vec)
+  jac_petsc_mat_A = convert(PETScMatrix,jac_julia_mat_A)
 
   # In a parallel MPI context, x is a vector with a data layout typically different from
   # the one of res_julia_vec. On the one hand, x holds the free dof values of a FE
@@ -142,7 +142,7 @@ function _setup_cache(x::AbstractVector,nls::PETScNonlinearSolver,op::NonlinearOperator)
   # (for the owned dof values).
   x_julia_vec = similar(res_julia_vec,eltype(res_julia_vec),(axes(jac_julia_mat_A)[2],))
   copy!(x_julia_vec,x)
-  x_petsc_vec = PETScVector(x_julia_vec)
+  x_petsc_vec = convert(PETScVector,x_julia_vec)
   PETScNonlinearSolverCache(op, x_julia_vec,res_julia_vec,
                             jac_julia_mat_A,jac_julia_mat_A,
                             x_petsc_vec,res_petsc_vec,
diff --git a/src/PartitionedArrays.jl b/src/PartitionedArrays.jl
index eed0b3b..ca4c0a1 100644
--- a/src/PartitionedArrays.jl
+++ b/src/PartitionedArrays.jl
@@ -1,12 +1,14 @@
 # Support for PartitionedArrays
 
-function PETScVector(v::PVector)
+Base.convert(::Type{PETScVector},v::PVector)=_petsc_vector(v)
+
+function _petsc_vector(v::PVector)
   backend = get_backend(v.values)
-  PETScVector(v,backend)
+  _petsc_vector(v,backend)
 end
 
-function PETScVector(v::PVector,::SequentialBackend)
+function _petsc_vector(v::PVector,::SequentialBackend)
   gid_to_value = zeros(eltype(v),length(v))
   map_parts(v.values,v.rows.partition) do values,rows
     @check isa(rows,IndexRange) "Unsupported partition for PETSc vectors" # to be consistent with MPI
@@ -17,7 +19,7 @@ function PETScVector(v::PVector,::SequentialBackend)
   PETScVector(gid_to_value)
 end
 
-function PETScVector(v::PVector,::MPIBackend)
+function _petsc_vector(v::PVector,::MPIBackend)
   w = PETScVector()
   N = num_gids(v.rows)
   comm = v.values.comm # Not sure about this
@@ -39,7 +41,7 @@ function PETScVector(v::PVector,::MPIBackend)
 end
 
 function PETScVector(a::PetscScalar,ax::PRange)
-  PETScVector(PVector(a,ax))
+  convert(PETScVector,PVector(a,ax))
 end
 
 function PartitionedArrays.PVector(v::PETScVector,ids::PRange)
@@ -83,7 +89,12 @@ function Base.copy!(pvec::PVector,petscvec::PETScVector)
       lg=get_local_oh_vector(petscvec)
       if (isa(lg,PETScVector)) # petsc_vec is a ghosted vector
         # Only copying owned DoFs. This should be followed by
-        # an exchange if the ghost DoFs of pvec are to be consumed
+        # an exchange if the ghost DoFs of pvec are to be consumed.
+        # We are assuming here that the layouts of pvec and petscvec
+        # are compatible. We do not have any information about the
+        # layout of petscvec to check this out. We decided NOT to
+        # convert petscvec into a PVector to avoid extra memory allocation
+        # and copies.
        @assert pvec.rows.ghost
        lx=get_local_vector(lg)
        vvalues=view(values,indices.oid_to_lid)
@@ -95,13 +102,20 @@ function Base.copy!(pvec::PVector,petscvec::PETScVector)
        restore_local_vector!(lx,lg)
        GridapPETSc.Finalize(lg)
       else # petsc_vec is NOT a ghosted vector
-       @assert !pvec.rows.ghost
-       @assert length(lg)==length(values)
-       values .= lg
-       restore_local_vector!(petscvec,lg)
+       # @assert !pvec.rows.ghost
+       # @assert length(lg)==length(values)
+       # values .= lg
+       # restore_local_vector!(petscvec,lg)
+
+       # If I am not wrong, the code should never enter here. At least
+       # given how it is being leveraged at present from GridapPETSc.
+       # If in the future we need this case, the commented lines of
+       # code above could serve the purpose (not tested).
+       @notimplemented
      end
    end
   elseif get_backend(pvec.values) == sequential
+    # This is left as an exercise to the interested reader.
     @notimplemented
   end
   pvec
@@ -110,37 +124,44 @@ end
 
 function Base.copy!(petscvec::PETScVector,pvec::PVector)
   if get_backend(pvec.values) == mpi
     map_parts(pvec.values,pvec.rows.partition) do values, indices
+      @check isa(indices,IndexRange) "Unsupported partition for PETSc vectors"
       lg=get_local_oh_vector(petscvec)
       if (isa(lg,PETScVector)) # petscvec is a ghosted vector
        lx=get_local_vector(lg)
+       # Only copying owned DoFs. This should be followed by
+       # an exchange if the ghost DoFs of petscvec are to be consumed.
+       # We are assuming here that the layouts of pvec and petscvec
+       # are compatible. We do not have any information about the
+       # layout of petscvec to check this out.
-       if (pvec.rows.ghost) # pvec is a ghosted vector
-         @assert length(lx)==length(values)
-         lx .= values
-       else
-         vvalues=view(values,indices.oid_to_lid)
-         lx[1:num_oids(indices)] .= view(values,indices.oid_to_lid)
-       end
+       lx[1:num_oids(indices)] .= values[1:num_oids(indices)]
        restore_local_vector!(lx,lg)
        GridapPETSc.Finalize(lg)
       else
-       @assert !pvec.rows.ghost
-       @assert length(lg)==length(values)
-       lg .= values
-       restore_local_vector!(lg,petscvec)
+       # @assert !pvec.rows.ghost
+       # @assert length(lg)==length(values)
+       # lg .= values
+       # restore_local_vector!(lg,petscvec)
+       # See comment in the function copy! right above.
+       @notimplemented
      end
    end
   elseif get_backend(pvec.values) == sequential
-    @notimplemented
+    # Left as an exercise to the interested reader. Essentially the
+    # same approach as in Base.copy!(petscmat::PETScMatrix,mat::PSparseMatrix)
+    # has to be followed.
+    @notimplemented
   end
   petscvec
 end
 
-function PETScMatrix(a::PSparseMatrix)
+Base.convert(::Type{PETScMatrix},a::PSparseMatrix) = _petsc_matrix(a)
+
+function _petsc_matrix(a::PSparseMatrix)
   backend = get_backend(a.values)
-  PETScMatrix(a,backend)
+  _petsc_matrix(a,backend)
 end
 
-function PETScMatrix(a::PSparseMatrix,::SequentialBackend)
+function _petsc_matrix(a::PSparseMatrix,::SequentialBackend)
   map_main(a.rows.partition,a.cols.partition) do rows,cols
     @check isa(rows,IndexRange) "Not supported partition for PETSc matrices" # to be consistent with MPI
     @check isa(cols,IndexRange) "Not supported partition for PETSc matrices" # to be consistent with MPI
@@ -149,7 +170,7 @@ end
   convert(PETScMatrix,A)
 end
 
-function PETScMatrix(a::PSparseMatrix,::MPIBackend)
+function _petsc_matrix(a::PSparseMatrix,::MPIBackend)
   b = PETScMatrix()
   M = num_gids(a.rows)
   N = num_gids(a.cols)
@@ -179,6 +200,8 @@ end
 function Base.copy!(petscmat::PETScMatrix,mat::PSparseMatrix)
   parts=get_part_ids(mat.values)
   map_parts(parts, mat.values,mat.rows.partition,mat.cols.partition) do part, lmat, rdofs, cdofs
+    @check isa(rdofs,IndexRange) "Not supported partition for PETSc matrices"
+    @check isa(cdofs,IndexRange) "Not supported partition for PETSc matrices"
     Tm = SparseMatrixCSR{0,PetscScalar,PetscInt}
     csr = convert(Tm,lmat)
     ia = csr.rowptr
diff --git a/test/PartitionedArraysTests.jl b/test/PartitionedArraysTests.jl
index 2c03656..04062f5 100644
--- a/test/PartitionedArraysTests.jl
+++ b/test/PartitionedArraysTests.jl
@@ -90,7 +90,7 @@ function partitioned_tests(parts)
   end
 
   v = PVector(values,ids)
-  x = PETScVector(v)
+  x = convert(PETScVector,v)
   test_get_local_vector(v,x)
   test_vectors(v,x,ids)
 
@@ -113,7 +113,7 @@ function partitioned_tests(parts)
   A = PSparseMatrix(I,J,V,ids,ids,ids=:global)
   display(A.values)
 
-  B = PETScMatrix(A)
+  B = convert(PETScMatrix,A)
   PETSC.@check_error_code PETSC.MatView(B.mat[],PETSC.@PETSC_VIEWER_STDOUT_WORLD)
 
   function solve_system_and_check_solution(A::PSparseMatrix,B::PETScMatrix,v)
@@ -121,7 +121,7 @@ function partitioned_tests(parts)
     solver = PETScLinearSolver()
     ss = symbolic_setup(solver,B)
     ns = numerical_setup(ss,B)
-    y = PETScVector(A*v)
+    y = convert(PETScVector,A*v)
     x̂ = PETScVector(0.0,ids)
     solve!(x̂,ns,y)
     z = PVector(x̂,ids)

From 6af9c27aecf2b28942992dcda8c946b1b3a90d60 Mon Sep 17 00:00:00 2001
From: amartin
Date: Fri, 29 Oct 2021 16:39:07 +1100
Subject: [PATCH 13/14] Trying to fix failing tests

---
 src/PETScArrays.jl | 25 +++++++++++++------------
 1 file changed, 13 insertions(+), 12 deletions(-)

diff --git a/src/PETScArrays.jl b/src/PETScArrays.jl
index 0596df5..f050eb7 100644
--- a/src/PETScArrays.jl
+++ b/src/PETScArrays.jl
@@ -271,18 +271,6 @@ function PETScMatrix(csr::SparseMatrixCSR{0,PetscScalar,PetscInt})
   Init(A)
 end
 
-function PETScMatrix(a::AbstractMatrix{PetscScalar})
-  m, n = size(a)
-  i = [PetscInt(n*(i-1)) for i=1:m+1]
-  j = [PetscInt(j-1) for i=1:m for j=1:n]
-  v = [ a[i,j] for i=1:m for j=1:n]
-  A = PETScMatrix()
-  A.ownership = a
-  @check_error_code PETSC.MatCreateSeqAIJWithArrays(MPI.COMM_SELF,m,n,i,j,v,A.mat)
-  @check_error_code PETSC.MatAssemblyBegin(A.mat[],PETSC.MAT_FINAL_ASSEMBLY)
-  @check_error_code PETSC.MatAssemblyEnd(A.mat[],PETSC.MAT_FINAL_ASSEMBLY)
-  Init(A)
-end
 
 function Base.similar(::PETScMatrix,::Type{PetscScalar},ax::Tuple{Int,Int})
   PETScMatrix(ax[1],ax[2])
 end
@@ -354,6 +342,19 @@ function Base.convert(::Type{PETScMatrix},a::AbstractSparseMatrix)
   PETScMatrix(csr)
 end
 
+function Base.convert(::Type{PETScMatrix}, a::AbstractMatrix{PetscScalar})
+  m, n = size(a)
+  i = [PetscInt(n*(i-1)) for i=1:m+1]
+  j = [PetscInt(j-1) for i=1:m for j=1:n]
+  v = [ a[i,j] for i=1:m for j=1:n]
+  A = PETScMatrix()
+  A.ownership = a
+  @check_error_code PETSC.MatCreateSeqAIJWithArrays(MPI.COMM_SELF,m,n,i,j,v,A.mat)
+  @check_error_code PETSC.MatAssemblyBegin(A.mat[],PETSC.MAT_FINAL_ASSEMBLY)
+  @check_error_code PETSC.MatAssemblyEnd(A.mat[],PETSC.MAT_FINAL_ASSEMBLY)
+  Init(A)
+end
+
 function petsc_sparse(i,j,v,m,n)
   csr = sparsecsr(Val(0),i,j,v,m,n)
   convert(PETScMatrix,csr)

From dbcca1bcdd8c9743a0b567c46c88cae107f73dea Mon Sep 17 00:00:00 2001
From: amartin
Date: Sat, 30 Oct 2021 01:09:03 +1100
Subject: [PATCH 14/14] * Adding GridapDistributed to [compat] * Re-adding
 Manifest.toml to .gitignore.

---
 .gitignore   | 2 +-
 Project.toml | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/.gitignore b/.gitignore
index 9225b4a..bd4ad51 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,7 +2,7 @@
 *.jl.cov
 *.jl.mem
 .DS_Store
-#Manifest.toml
+Manifest.toml
 /dev/
 /docs/build/
 /docs/site/
diff --git a/Project.toml b/Project.toml
index aa79dce..5c536a6 100644
--- a/Project.toml
+++ b/Project.toml
@@ -16,6 +16,7 @@ SparseMatricesCSR = "a0a7dd2c-ebf4-11e9-1f05-cf50bc540ca1"
 
 [compat]
 Gridap = "0.17"
+GridapDistributed = "0.2.0"
 MPI = "0.14, 0.15, 0.16, 0.17, 0.18, 0.19"
 PETSc_jll = "3.13"
 PartitionedArrays = "0.2.4"