diff --git a/test/LinearSolvers/BlockDiagonalSmoothersTests.jl b/test/LinearSolvers/BlockDiagonalSmoothersTests.jl
index 87adf3df..06e48174 100644
--- a/test/LinearSolvers/BlockDiagonalSmoothersTests.jl
+++ b/test/LinearSolvers/BlockDiagonalSmoothersTests.jl
@@ -59,19 +59,25 @@ function is_same_vector(x1::BlockPVector,x2,X1,X2)
   _is_same_vector(_x1,_x2,X1,X2)
 end
 
-function main(model,use_petsc::Bool)
-  if use_petsc
-    GridapPETSc.with() do
-      solvers = Fill(PETScLinearSolver(set_ksp_options),2)
-      main(model,solvers)
-    end
+function get_mesh(parts,np)
+  Dc = length(np)
+  if Dc == 2
+    domain = (0,1,0,1)
+    nc = (8,8)
   else
-    solvers = Fill(LUSolver(),2)
-    main(model,solvers)
+    @assert Dc == 3
+    domain = (0,1,0,1,0,1)
+    nc = (8,8,8)
   end
+  if prod(np) == 1
+    model = CartesianDiscreteModel(domain,nc)
+  else
+    model = CartesianDiscreteModel(parts,np,domain,nc)
+  end
+  return model
 end
 
-function main(model,solvers)
+function main_driver(model,solvers)
   order = 2
   reffeᵤ = ReferenceFE(lagrangian,VectorValue{D,Float64},order)
   V = TestFESpace(model,reffeᵤ,conformity=:H1,dirichlet_tags=["boundary"])
@@ -126,24 +132,18 @@ function main(model,solvers)
   @test is_same_vector(x,x_star,Xb,X)
 end
 
-num_ranks = (2,2)
-parts = with_debug() do distribute
-  distribute(LinearIndices((prod(num_ranks),)))
+function main(distribute,np,use_petsc::Bool)
+  parts = distribute(LinearIndices((prod(np),)))
+  model = get_mesh(parts,np)
+  if use_petsc
+    GridapPETSc.with() do
+      solvers = Fill(PETScLinearSolver(set_ksp_options),2)
+      main_driver(model,solvers)
+    end
+  else
+    solvers = Fill(LUSolver(),2)
+    main_driver(model,solvers)
+  end
 end
 
-D = 2
-n = 10
-domain = Tuple(repeat([0,1],D))
-mesh_partition = (n,n)
-
-# Serial
-model = CartesianDiscreteModel(domain,mesh_partition)
-main(model,false)
-main(model,true)
-
-# Distributed, sequential
-model = CartesianDiscreteModel(parts,num_ranks,domain,mesh_partition)
-main(model,false)
-main(model,true)
-
 end
\ No newline at end of file
diff --git a/test/LinearSolvers/SchurComplementSolversTests.jl b/test/LinearSolvers/SchurComplementSolversTests.jl
index 123ad7bf..6a2af3b5 100644
--- a/test/LinearSolvers/SchurComplementSolversTests.jl
+++ b/test/LinearSolvers/SchurComplementSolversTests.jl
@@ -27,6 +27,24 @@ function l2_error(x,sol,X,dΩ)
   return l2_error(xh,sol,dΩ)
 end
 
+function get_mesh(parts,np)
+  Dc = length(np)
+  if Dc == 2
+    domain = (0,1,0,1)
+    nc = (8,8)
+  else
+    @assert Dc == 3
+    domain = (0,1,0,1,0,1)
+    nc = (8,8,8)
+  end
+  if prod(np) == 1
+    model = CartesianDiscreteModel(domain,nc)
+  else
+    model = CartesianDiscreteModel(parts,np,domain,nc)
+  end
+  return model
+end
+
 # Darcy solution
 const β_U = 50.0
 const γ = 100.0
@@ -35,7 +53,9 @@ u_ref(x) = VectorValue(x[1]+x[2],-x[2])
 p_ref(x) = 2.0*x[1]-1.0
 f_ref(x) = u_ref(x) + ∇(p_ref)(x)
 
-function main(model)
+function main(distribute,np)
+  parts = distribute(LinearIndices((prod(np),)))
+  model = get_mesh(parts,np)
   labels = get_face_labeling(model)
   add_tag_from_tags!(labels,"dirichlet",[1,2,3,4,5,6,7])
 
@@ -99,22 +119,4 @@ function main(model)
   @test l2_error(ph,p_ref,dΩ) < 1.e-4
 end
 
-num_ranks = (2,2)
-parts = with_debug() do distribute
-  distribute(LinearIndices((prod(num_ranks),)))
-end
-
-D = 2
-n = 60
-domain = Tuple(repeat([0,1],D))
-mesh_partition = (n,n)
-
-# Serial
-model = CartesianDiscreteModel(domain,mesh_partition)
-main(model)
-
-# Distributed, sequential
-model = CartesianDiscreteModel(parts,num_ranks,domain,mesh_partition)
-main(model)
-
 end
\ No newline at end of file
diff --git a/test/LinearSolvers/mpi/BlockDiagonalSmoothersTests.jl b/test/LinearSolvers/mpi/BlockDiagonalSmoothersTests.jl
new file mode 100644
index 00000000..59384b28
--- /dev/null
+++ b/test/LinearSolvers/mpi/BlockDiagonalSmoothersTests.jl
@@ -0,0 +1,10 @@
+module BlockDiagonalSmoothersTestsMPI
+using PartitionedArrays, MPI
+include("../BlockDiagonalSmoothersTests.jl")
+
+with_mpi() do distribute
+  BlockDiagonalSmoothersTests.main(distribute,(2,2),false)
+  BlockDiagonalSmoothersTests.main(distribute,(2,2),true)
+end
+
+end
\ No newline at end of file
diff --git a/test/LinearSolvers/mpi/SchurComplementSolversTests.jl b/test/LinearSolvers/mpi/SchurComplementSolversTests.jl
new file mode 100644
index 00000000..b20653fb
--- /dev/null
+++ b/test/LinearSolvers/mpi/SchurComplementSolversTests.jl
@@ -0,0 +1,9 @@
+module SchurComplementSolversTestsMPI
+using PartitionedArrays, MPI
+include("../SchurComplementSolversTests.jl")
+
+with_mpi() do distribute
+  SchurComplementSolversTests.main(distribute,(2,2))
+end
+
+end
\ No newline at end of file
diff --git a/test/LinearSolvers/seq/BlockDiagonalSmoothersTests.jl b/test/LinearSolvers/seq/BlockDiagonalSmoothersTests.jl
new file mode 100644
index 00000000..7515c06c
--- /dev/null
+++ b/test/LinearSolvers/seq/BlockDiagonalSmoothersTests.jl
@@ -0,0 +1,12 @@
+module BlockDiagonalSmoothersTestsSeq
+using PartitionedArrays
+include("../BlockDiagonalSmoothersTests.jl")
+
+with_debug() do distribute
+  BlockDiagonalSmoothersTests.main(distribute,(1,1),false)
+  BlockDiagonalSmoothersTests.main(distribute,(1,1),true)
+  BlockDiagonalSmoothersTests.main(distribute,(2,2),false)
+  BlockDiagonalSmoothersTests.main(distribute,(2,2),true)
+end
+
+end
\ No newline at end of file
diff --git a/test/LinearSolvers/seq/ShurComplementSolversTests.jl b/test/LinearSolvers/seq/ShurComplementSolversTests.jl
new file mode 100644
index 00000000..daf1742e
--- /dev/null
+++ b/test/LinearSolvers/seq/ShurComplementSolversTests.jl
@@ -0,0 +1,10 @@
+module SchurComplementSolversTestsSequential
+using PartitionedArrays
+include("../SchurComplementSolversTests.jl")
+
+with_debug() do distribute
+  SchurComplementSolversTests.main(distribute,(1,1))
+  SchurComplementSolversTests.main(distribute,(2,2))
+end
+
+end
\ No newline at end of file
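Reviewer note: with this refactor every test is reached through main(distribute,np,...), so the new seq/ and mpi/ drivers can be wired into a test runner in the usual way. The snippet below is only an illustrative sketch, not part of this patch: the @testset names and the launch logic are assumptions, the file paths are the ones added above, 4 ranks are used to match the (2,2) layout, and the exact way of obtaining the mpiexec command may differ between MPI.jl versions.

# Illustrative test-runner sketch; paths come from this patch, the rest is assumed.
using Test
using MPI: mpiexec

# Debug-mode variants run in-process through PartitionedArrays' with_debug().
@testset "LinearSolvers (sequential)" begin
  include("LinearSolvers/seq/BlockDiagonalSmoothersTests.jl")
  include("LinearSolvers/seq/ShurComplementSolversTests.jl")
end

# MPI variants need their own processes; 4 ranks match the (2,2) partition above.
# Obtaining the mpiexec command as a Cmd assumes a recent MPI.jl.
@testset "LinearSolvers (MPI)" begin
  for file in ("BlockDiagonalSmoothersTests.jl","SchurComplementSolversTests.jl")
    script = joinpath(@__DIR__,"LinearSolvers","mpi",file)
    cmd = `$(mpiexec()) -n 4 $(Base.julia_cmd()) --project=$(Base.active_project()) $script`
    @test success(cmd)  # the MPI test passes if the driver exits cleanly
  end
end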