Merge pull request #8 from fverdugo/api_tests
Api tests
fverdugo authored May 17, 2024
2 parents f7f25ca + dae2543 commit 44b5f4c
Showing 6 changed files with 326 additions and 144 deletions.
2 changes: 1 addition & 1 deletion Project.toml
@@ -15,7 +15,7 @@ SparseMatricesCSR = "a0a7dd2c-ebf4-11e9-1f05-cf50bc540ca1"

[compat]
MPI = "0.14, 0.15, 0.16, 0.17, 0.18, 0.19, 0.20"
PETSc_jll = "=3.13.4, =3.15.2"
PETSc_jll = "3"
PartitionedArrays = "0.4"
Preferences = "1"
SparseArrays = "1"
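
Side note, not part of the diff: under Pkg's default caret semantics, the new compat entry accepts any PETSc_jll 3.x release, whereas the old entry pinned exactly two patch versions. A quick way to inspect the implied ranges (a sketch, assuming a recent Pkg where Pkg.Types.semver_spec is available):

using Pkg
@show Pkg.Types.semver_spec("3")                  # caret range: any version >= 3.0.0 and < 4.0.0
@show Pkg.Types.semver_spec("=3.13.4, =3.15.2")   # union of exactly the two pinned releases
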
341 changes: 200 additions & 141 deletions src/api.jl

Large diffs are not rendered by default.

33 changes: 33 additions & 0 deletions test/mpi_array/api_test.jl
@@ -0,0 +1,33 @@
module ApiTests

using Test
using MPI
using PartitionedArrays

repodir = normpath(joinpath(@__DIR__,"..",".."))

defs = joinpath(repodir,"test","mpi_array","api_test_defs.jl")

include(defs)
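# Single-rank run in the current process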
params = (;nodes_per_dir=(10,10,10),parts_per_dir=(1,1,1))
with_mpi(dist->Defs.main(dist,params))

# Spawn a fresh 8-rank MPI session for a 2x2x2 process grid
code = quote
using MPI; MPI.Init()
using PartitionedArrays
include($defs)
params = (;nodes_per_dir=(10,10,10),parts_per_dir=(2,2,2))
with_mpi(dist->Defs.main(dist,params))
end
run(`$(mpiexec()) -np 8 $(Base.julia_cmd()) --project=$repodir -e $code`)

# Same, but with an anisotropic 2x4x1 process grid
code = quote
using MPI; MPI.Init()
using PartitionedArrays
include($defs)
params = (;nodes_per_dir=(10,10,10),parts_per_dir=(2,4,1))
with_mpi(dist->Defs.main(dist,params))
end
run(`$(mpiexec()) -np 8 $(Base.julia_cmd()) --project=$repodir -e $code`)

end # module
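
Side note, not part of the commit: the quote-and-spawn pattern above is how a serial test session gets true multi-rank coverage. The test interpolates the path of the defs file into a quoted expression, stringifies it into julia -e, and launches it under mpiexec. A stripped-down sketch of the same pattern (the 4-rank count and the printed line are illustrative):

using MPI

code = quote
    using MPI; MPI.Init()
    println("rank ", MPI.Comm_rank(MPI.COMM_WORLD))
end
run(`$(mpiexec()) -np 4 $(Base.julia_cmd()) -e $code`)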
85 changes: 85 additions & 0 deletions test/mpi_array/api_test_defs.jl
@@ -0,0 +1,85 @@
module Defs

using PartitionedArrays
using PetscCall
using LinearAlgebra
using Test

function spmv_petsc!(b,A,x)
# Convert the input to petsc objects
mat = Ref{PetscCall.Mat}()
vec_b = Ref{PetscCall.Vec}()
vec_x = Ref{PetscCall.Vec}()
parts = linear_indices(partition(x))
petsc_comm = PetscCall.setup_petsc_comm(parts)
args_A = PetscCall.MatCreateMPIAIJWithSplitArrays_args(A,petsc_comm)
args_b = PetscCall.VecCreateMPIWithArray_args(copy(b),petsc_comm)
args_x = PetscCall.VecCreateMPIWithArray_args(copy(x),petsc_comm)
ownership = (args_A,args_b,args_x)
PetscCall.@check_error_code PetscCall.MatCreateMPIAIJWithSplitArrays(args_A...,mat)
PetscCall.@check_error_code PetscCall.MatAssemblyBegin(mat[],PetscCall.MAT_FINAL_ASSEMBLY)
PetscCall.@check_error_code PetscCall.MatAssemblyEnd(mat[],PetscCall.MAT_FINAL_ASSEMBLY)
PetscCall.@check_error_code PetscCall.VecCreateMPIWithArray(args_b...,vec_b)
PetscCall.@check_error_code PetscCall.VecCreateMPIWithArray(args_x...,vec_x)
# This line does the actual product
PetscCall.@check_error_code PetscCall.MatMult(mat[],vec_x[],vec_b[])
# Move the result back to julia
PetscCall.VecCreateMPIWithArray_args_reversed!(b,args_b)
# Cleanup
GC.@preserve ownership PetscCall.@check_error_code PetscCall.MatDestroy(mat)
GC.@preserve ownership PetscCall.@check_error_code PetscCall.VecDestroy(vec_b)
GC.@preserve ownership PetscCall.@check_error_code PetscCall.VecDestroy(vec_x)
b
end
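
# Usage sketch (illustrative): given a partitioned matrix A, compatible
# partitioned vectors b and x, and PetscCall initialized,
#
#   spmv_petsc!(b,A,x)
#
# overwrites b with A*x computed by PETSc's MatMult; see main below.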

function test_spmm_petsc(A,B)
parts = linear_indices(partition(A))
petsc_comm = PetscCall.setup_petsc_comm(parts)
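# Native product computed for reference (reuse=true also returns a cache);
# the PETSc call sequence below is checked for successful completion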
C1, cacheC = spmm(A,B,reuse=true)
mat_A = Ref{PetscCall.Mat}()
mat_B = Ref{PetscCall.Mat}()
mat_C = Ref{PetscCall.Mat}()
args_A = PetscCall.MatCreateMPIAIJWithSplitArrays_args(A,petsc_comm)
args_B = PetscCall.MatCreateMPIAIJWithSplitArrays_args(B,petsc_comm)
ownership = (args_A,args_B)
PetscCall.@check_error_code PetscCall.MatCreateMPIAIJWithSplitArrays(args_A...,mat_A)
PetscCall.@check_error_code PetscCall.MatCreateMPIAIJWithSplitArrays(args_B...,mat_B)
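# Build C = A*B with PETSc's MatProduct API: create, set type, symbolic phase, then numeric phase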
PetscCall.@check_error_code PetscCall.MatProductCreate(mat_A[],mat_B[],C_NULL,mat_C)
PetscCall.@check_error_code PetscCall.MatProductSetType(mat_C[],PetscCall.MATPRODUCT_AB)
PetscCall.@check_error_code PetscCall.MatProductSetFromOptions(mat_C[])
PetscCall.@check_error_code PetscCall.MatProductSymbolic(mat_C[])
PetscCall.@check_error_code PetscCall.MatProductNumeric(mat_C[])
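# Rebind the same operand matrices and rerun only the numeric phase, exercising product reuse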
PetscCall.@check_error_code PetscCall.MatProductReplaceMats(mat_A[],mat_B[],C_NULL,mat_C[])
PetscCall.@check_error_code PetscCall.MatProductNumeric(mat_C[])
PetscCall.@check_error_code PetscCall.MatProductClear(mat_C[])
GC.@preserve ownership PetscCall.@check_error_code PetscCall.MatDestroy(mat_A)
GC.@preserve ownership PetscCall.@check_error_code PetscCall.MatDestroy(mat_B)
GC.@preserve ownership PetscCall.@check_error_code PetscCall.MatDestroy(mat_C)
end

function main(distribute,params)
nodes_per_dir = params.nodes_per_dir
parts_per_dir = params.parts_per_dir
np = prod(parts_per_dir)
ranks = LinearIndices((np,)) |> distribute
A = PartitionedArrays.laplace_matrix(nodes_per_dir,parts_per_dir,ranks)
rows = partition(axes(A,1))
cols = partition(axes(A,2))
x = pones(cols)
b1 = pzeros(rows)
b2 = pzeros(rows)
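# Reference result computed natively by PartitionedArrays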
mul!(b1,A,x)
if ! PetscCall.initialized()
PetscCall.init()
end
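# Same product through PETSc; b2 should match b1 up to roundoff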
spmv_petsc!(b2,A,x)
c = b1-b2
tol = 1.0e-12
@test norm(b1) > tol
@test norm(b2) > tol
@test norm(c)/norm(b1) < tol
B = 2*A
test_spmm_petsc(A,B)
end

end #module
5 changes: 3 additions & 2 deletions test/mpi_array/ksp_test.jl
@@ -1,3 +1,5 @@
+module KspTests

using MPI
using Test

@@ -19,6 +21,5 @@

run(`$mpiexec_cmd -np 3 $(Base.julia_cmd()) --project=$repodir -e $code`)

-nothing

+end # module

4 changes: 4 additions & 0 deletions test/runtests.jl
@@ -3,6 +3,10 @@ module PetscCallTest
using PetscCall
using Test

@testset "API" begin
@testset "PartitionedArrays: MPIArray" begin include("mpi_array/api_test.jl") end
end

@testset "KSP" begin
@testset "Sequential" begin include("ksp_test.jl") end
@testset "PartitionedArrays: DebugArray" begin include("debug_array/ksp_test.jl") end
