-
Notifications
You must be signed in to change notification settings - Fork 2
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #46 from gridap/petsc-solvers
Complex PETSc solvers
- Loading branch information
Showing
8 changed files
with
701 additions
and
9 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,121 @@ | ||
""" | ||
GMRES + AMG solver, specifically designed for linear elasticity problems. | ||
Follows PETSc's documentation for [PCAMG](https://petsc.org/release/manualpages/PC/PCGAMG.html) | ||
and [MatNullSpaceCreateRigidBody](https://petsc.org/release/manualpages/Mat/MatNullSpaceCreateRigidBody.html). | ||
""" | ||
struct ElasticitySolver{A} <: Algebra.LinearSolver | ||
space :: A | ||
tols :: SolverTolerances{Float64} | ||
function ElasticitySolver(space::FESpace; | ||
maxiter=500,atol=1.e-12,rtol=1.e-8) | ||
tols = SolverTolerances{Float64}(;maxiter=maxiter,atol=atol,rtol=rtol) | ||
A = typeof(space) | ||
new{A}(space,tols) | ||
end | ||
end | ||
|
||
# Return the convergence tolerances stored in an `ElasticitySolver`.
function SolverInterfaces.get_solver_tolerances(solver::ElasticitySolver)
  return solver.tols
end
|
||
# Symbolic setup for `ElasticitySolver`: no work is done at this stage,
# we only keep a reference to the solver until matrix values are available.
struct ElasticitySymbolicSetup{A} <: SymbolicSetup
  solver::A
end
|
||
# Create the symbolic setup for an `ElasticitySolver`. The matrix `A` is not
# inspected here; all the actual work happens in `numerical_setup`.
function Gridap.Algebra.symbolic_setup(solver::ElasticitySolver,A::AbstractMatrix)
  ss = ElasticitySymbolicSetup(solver)
  return ss
end
|
||
# Configure the PETSc KSP for the elasticity solve: CG Krylov method with the
# GAMG (smoothed-aggregation algebraic multigrid) preconditioner, using the
# tolerances stored in `tols`. `ksp` is a `Ref` to a PETSc KSP handle.
function elasticity_ksp_setup(ksp,tols)
  rtol = PetscScalar(tols.rtol)
  atol = PetscScalar(tols.atol)
  dtol = PetscScalar(tols.dtol) # divergence tolerance — NOTE(review): the ElasticitySolver constructor never sets `dtol`; assumes SolverTolerances provides a default — confirm
  maxits = PetscInt(tols.maxiter)

  # Krylov method: conjugate gradient (CG requires a symmetric positive-definite operator)
  @check_error_code GridapPETSc.PETSC.KSPSetType(ksp[],GridapPETSc.PETSC.KSPCG)
  @check_error_code GridapPETSc.PETSC.KSPSetTolerances(ksp[], rtol, atol, dtol, maxits)

  # Preconditioner: PETSc's geometric-algebraic multigrid
  pc = Ref{GridapPETSc.PETSC.PC}()
  @check_error_code GridapPETSc.PETSC.KSPGetPC(ksp[],pc)
  @check_error_code GridapPETSc.PETSC.PCSetType(pc[],GridapPETSc.PETSC.PCGAMG)

  # Print the resulting solver configuration
  @check_error_code GridapPETSc.PETSC.KSPView(ksp[],C_NULL)
end
|
||
# Numerical setup for `ElasticitySolver`. Mutable so `numerical_setup!` can
# swap in a new matrix, and so the PETSc handles can be released lazily via a
# finalizer (see `GridapPETSc.Init` / `GridapPETSc.Finalize` below).
mutable struct ElasticityNumericalSetup <: NumericalSetup
  A::PETScMatrix                             # system matrix
  X::PETScVector                             # PETSc work vector for the solution
  B::PETScVector                             # PETSc work vector for the right-hand side
  ksp::Ref{GridapPETSc.PETSC.KSP}            # PETSc Krylov-solver handle
  null::Ref{GridapPETSc.PETSC.MatNullSpace}  # rigid-body near-nullspace handle
  initialized::Bool                          # true while the PETSc objects are alive
  function ElasticityNumericalSetup(A::PETScMatrix,X::PETScVector,B::PETScVector)
    # The ksp/null handles are created empty; they are filled in `numerical_setup`
    ksp = Ref{GridapPETSc.PETSC.KSP}()
    null = Ref{GridapPETSc.PETSC.MatNullSpace}()
    new(A,X,B,ksp,null,false)
  end
end
|
||
# Register `a` with GridapPETSc's global reference counting and attach a
# finalizer that destroys its PETSc objects when `a` is garbage-collected.
function GridapPETSc.Init(a::ElasticityNumericalSetup)
  @assert Threads.threadid() == 1 # refcount bookkeeping is done on thread 1 only
  GridapPETSc._NREFS[] += 2       # two PETSc objects are owned: `ksp` and `null`
  a.initialized = true
  finalizer(GridapPETSc.Finalize,a)
end
|
||
# Destroy (or schedule destruction of) the PETSc objects owned by `ns`.
# Idempotent: acts only while `ns` is initialized and PETSc is still up.
function GridapPETSc.Finalize(ns::ElasticityNumericalSetup)
  if ns.initialized && GridapPETSc.Initialized()
    if ns.A.comm == MPI.COMM_SELF
      # Serial (single-process) objects can be destroyed immediately
      @check_error_code GridapPETSc.PETSC.KSPDestroy(ns.ksp)
      @check_error_code GridapPETSc.PETSC.MatNullSpaceDestroy(ns.null)
    else
      # Parallel objects: defer destruction to PetscFinalize, since the GC may
      # collect `ns` at different times on different ranks
      @check_error_code GridapPETSc.PETSC.PetscObjectRegisterDestroy(ns.ksp[].ptr)
      @check_error_code GridapPETSc.PETSC.PetscObjectRegisterDestroy(ns.null[].ptr)
    end
    ns.initialized = false
    @assert Threads.threadid() == 1 # refcount bookkeeping is done on thread 1 only
    GridapPETSc._NREFS[] -= 2       # release the two refs taken in `Init`
  end
  nothing
end
|
||
# Build the full PETSc solver for matrix `_A`: convert data to PETSc format,
# attach a rigid-body near-nullspace computed from the DoF coordinates of the
# solver's FE space, and configure the CG+GAMG KSP.
function Gridap.Algebra.numerical_setup(ss::ElasticitySymbolicSetup,_A::PSparseMatrix)
  # Helpers to get the spatial dimension from a serial or distributed space
  _num_dims(space::FESpace) = num_cell_dims(get_triangulation(space))
  _num_dims(space::GridapDistributed.DistributedSingleFieldFESpace) = getany(map(_num_dims,local_views(space)))
  s = ss.solver

  # Create ns: PETSc copies of the matrix and two work vectors
  A = convert(PETScMatrix,_A)
  X = convert(PETScVector,allocate_in_domain(_A))
  B = convert(PETScVector,allocate_in_domain(_A))
  ns = ElasticityNumericalSetup(A,X,B)

  # Compute coordinates for owned dofs; the block size tells PETSc how many
  # scalar components belong to each node
  dof_coords = convert(PETScVector,get_dof_coordinates(s.space))
  @check_error_code GridapPETSc.PETSC.VecSetBlockSize(dof_coords.vec[],_num_dims(s.space))

  # Create matrix nullspace: rigid-body modes, used by GAMG as near-nullspace
  @check_error_code GridapPETSc.PETSC.MatNullSpaceCreateRigidBody(dof_coords.vec[],ns.null)
  @check_error_code GridapPETSc.PETSC.MatSetNearNullSpace(ns.A.mat[],ns.null[])

  # Setup solver and preconditioner (same matrix for operator and PC)
  @check_error_code GridapPETSc.PETSC.KSPCreate(ns.A.comm,ns.ksp)
  @check_error_code GridapPETSc.PETSC.KSPSetOperators(ns.ksp[],ns.A.mat[],ns.A.mat[])
  elasticity_ksp_setup(ns.ksp,s.tols)
  @check_error_code GridapPETSc.PETSC.KSPSetUp(ns.ksp[])
  GridapPETSc.Init(ns) # register finalizer; returns `ns`
end
|
||
# Reuse the existing KSP with a new matrix `A`: re-attach the near-nullspace,
# swap the operators and rebuild the preconditioner.
function Gridap.Algebra.numerical_setup!(ns::ElasticityNumericalSetup,A::AbstractMatrix)
  ns.A = convert(PETScMatrix,A)
  @check_error_code GridapPETSc.PETSC.MatSetNearNullSpace(ns.A.mat[],ns.null[])
  @check_error_code GridapPETSc.PETSC.KSPSetOperators(ns.ksp[],ns.A.mat[],ns.A.mat[])
  @check_error_code GridapPETSc.PETSC.KSPSetUp(ns.ksp[])
  ns
end
|
||
"""
Solve the linear system for right-hand side `b`, writing the solution into `x`.
The PETSc work vectors stored in `ns` are reused to avoid allocations.
"""
function Algebra.solve!(x::AbstractVector{PetscScalar},ns::ElasticityNumericalSetup,b::AbstractVector{PetscScalar})
  # Transfer the rhs into the PETSc work vector
  copy!(ns.B,b)
  # Solve A * X = B with the previously configured KSP
  @check_error_code GridapPETSc.PETSC.KSPSolve(ns.ksp[],ns.B.vec[],ns.X.vec[])
  # Copy the PETSc solution back into the caller's vector
  copy!(x,ns.X)
  return x
end
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,60 @@ | ||
|
||
"""
Build a PETSc GMRES solver preconditioned with Hypre's ADS, for 3D
lowest-order problems on `model`. Dirichlet boundaries are taken from `tags`.
"""
function ADS_Solver(model,tags,order;rtol=1e-8,maxits=300)
  @assert (num_cell_dims(model) == 3) "Not implemented for 2D"
  @assert (order == 1) "Only works for linear order"

  dim = num_cell_dims(model)
  # Auxiliary spaces and the discrete operators required by Hypre ADS
  V_H1_sc, V_H1, V_Hdiv, V_Hcurl = get_ads_spaces(model,order,tags)
  G, C, Π_div, Π_curl = get_ads_operators(V_H1_sc,V_H1,V_Hcurl,V_Hdiv)

  # The KSP configuration closure captures all operators by reference
  setup = ksp -> ads_ksp_setup(ksp,rtol,maxits,dim,G,C,Π_div,Π_curl)
  return PETScLinearSolver(setup)
end
|
||
"""
Create the auxiliary FE spaces needed by the ADS preconditioner on `model`:
scalar H1 (Lagrangian), vector-valued H1, H(div) (Raviart-Thomas) and
H(curl) (Nédélec), all with Dirichlet conditions on `tags`.
"""
function get_ads_spaces(model,order,tags)
  # BUGFIX: `D` was referenced below without being defined in this scope
  # (it was only a local of `ADS_Solver`). Compute it from the model.
  D = num_cell_dims(model)

  reffe_H1_sc = ReferenceFE(lagrangian,Float64,order)
  V_H1_sc = FESpace(model,reffe_H1_sc;dirichlet_tags=tags)

  reffe_H1 = ReferenceFE(lagrangian,VectorValue{D,Float64},order)
  V_H1 = FESpace(model,reffe_H1;dirichlet_tags=tags)

  # Raviart-Thomas/Nédélec orders are shifted by one w.r.t. the nodal order
  reffe_Hdiv = ReferenceFE(raviart_thomas,Float64,order-1)
  V_Hdiv = FESpace(model,reffe_Hdiv;dirichlet_tags=tags)

  reffe_Hcurl = ReferenceFE(nedelec,Float64,order-1)
  V_Hcurl = FESpace(model,reffe_Hcurl;dirichlet_tags=tags)

  return V_H1_sc, V_H1, V_Hdiv, V_Hcurl
end
|
||
"""
Assemble the discrete operators required by Hypre ADS: the gradient `G`
(H1 → H(curl)), the curl `C` (H(curl) → H(div)), and the interpolations of
the vector-valued H1 space into H(div) and H(curl).
"""
function get_ads_operators(V_H1_sc,V_H1_vec,V_Hcurl,V_Hdiv)
  grad_op = interpolation_operator(u->∇(u),V_H1_sc,V_Hcurl)
  curl_op = interpolation_operator(u->cross(∇,u),V_Hcurl,V_Hdiv)
  interp_div = interpolation_operator(u->u,V_H1_vec,V_Hdiv)
  interp_curl = interpolation_operator(u->u,V_H1_vec,V_Hcurl)
  return grad_op, curl_op, interp_div, interp_curl
end
|
||
# Configure a PETSc KSP with GMRES and the Hypre ADS preconditioner.
# `G`, `C`, `Π_div`, `Π_curl` are the discrete gradient/curl/interpolation
# matrices produced by `get_ads_operators`; `dim` is the spatial dimension.
function ads_ksp_setup(ksp,rtol,maxits,dim,G,C,Π_div,Π_curl)
  rtol = PetscScalar(rtol)
  atol = GridapPETSc.PETSC.PETSC_DEFAULT # keep PETSc's default absolute tolerance
  dtol = GridapPETSc.PETSC.PETSC_DEFAULT # keep PETSc's default divergence tolerance
  maxits = PetscInt(maxits)

  @check_error_code GridapPETSc.PETSC.KSPSetType(ksp[],GridapPETSc.PETSC.KSPGMRES)
  @check_error_code GridapPETSc.PETSC.KSPSetTolerances(ksp[], rtol, atol, dtol, maxits)

  pc = Ref{GridapPETSc.PETSC.PC}()
  @check_error_code GridapPETSc.PETSC.KSPGetPC(ksp[],pc)
  @check_error_code GridapPETSc.PETSC.PCSetType(pc[],GridapPETSc.PETSC.PCHYPRE)

  # Hand the auxiliary-space operators to Hypre (converted to PETSc matrices)
  _G = convert(PETScMatrix,G)
  _C = convert(PETScMatrix,C)
  _Π_div = convert(PETScMatrix,Π_div)
  _Π_curl = convert(PETScMatrix,Π_curl)
  @check_error_code GridapPETSc.PETSC.PCHYPRESetDiscreteGradient(pc[],_G.mat[])
  @check_error_code GridapPETSc.PETSC.PCHYPRESetDiscreteCurl(pc[],_C.mat[])
  # C_NULL slots: per-component interpolations are not provided, only the full ones
  @check_error_code GridapPETSc.PETSC.PCHYPRESetInterpolations(pc[],dim,_Π_div.mat[],C_NULL,_Π_curl.mat[],C_NULL)

  # Print the resulting solver configuration
  @check_error_code GridapPETSc.PETSC.KSPView(ksp[],C_NULL)
end
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,137 @@ | ||
|
||
# DoF coordinates | ||
|
||
""" | ||
Given a lagrangian FESpace, returns the physical coordinates of the DoFs, as required | ||
by some PETSc solvers. See [PETSc documentation](https://petsc.org/release/manualpages/PC/PCSetCoordinates.html). | ||
""" | ||
function get_dof_coordinates(space::GridapDistributed.DistributedSingleFieldFESpace) | ||
coords = map(local_views(space),partition(space.gids)) do space, dof_ids | ||
local_to_own_dofs = local_to_own(dof_ids) | ||
return get_dof_coordinates(space;perm=local_to_own_dofs) | ||
end | ||
|
||
ngdofs = length(space.gids) | ||
indices = map(local_views(space.gids)) do dof_indices | ||
owner = part_id(dof_indices) | ||
own_indices = OwnIndices(ngdofs,owner,own_to_global(dof_indices)) | ||
ghost_indices = GhostIndices(ngdofs,Int64[],Int32[]) # We only consider owned dofs | ||
OwnAndGhostIndices(own_indices,ghost_indices) | ||
end | ||
return PVector(coords,indices) | ||
end | ||
|
||
# Serial version: return a flat `Vector{Float64}` with one scalar coordinate
# per free DoF. `perm[dof]` gives the output position of local dof `dof`;
# dofs with `perm[dof] <= 0` (e.g. ghosts) are skipped.
function get_dof_coordinates(space::FESpace;perm=Base.OneTo(num_free_dofs(space)))
  trian = get_triangulation(space)
  cell_dofs = get_fe_dof_basis(space)
  cell_ids = get_cell_dof_ids(space)

  # Per-cell: reference node coordinates, and dof -> (node, component) maps
  cell_ref_nodes = lazy_map(get_nodes,CellData.get_data(cell_dofs))
  cell_dof_to_node = lazy_map(get_dof_to_node,CellData.get_data(cell_dofs))
  cell_dof_to_comp = lazy_map(get_dof_to_comp,CellData.get_data(cell_dofs))

  # Push reference nodes to the physical domain through the cell maps
  cmaps = get_cell_map(trian)
  cell_phys_nodes = lazy_map(evaluate,cmaps,cell_ref_nodes)

  node_coords = Vector{Float64}(undef,maximum(perm))
  # Array caches avoid re-allocating per-cell arrays inside the loop
  cache_nodes = array_cache(cell_phys_nodes)
  cache_ids = array_cache(cell_ids)
  cache_dof_to_node = array_cache(cell_dof_to_node)
  cache_dof_to_comp = array_cache(cell_dof_to_comp)
  for cell in 1:num_cells(trian)
    ids = getindex!(cache_ids,cell_ids,cell)
    nodes = getindex!(cache_nodes,cell_phys_nodes,cell)
    dof_to_comp = getindex!(cache_dof_to_comp,cell_dof_to_comp,cell)
    dof_to_node = getindex!(cache_dof_to_node,cell_dof_to_node,cell)
    for (dof,c,n) in zip(ids,dof_to_comp,dof_to_node)
      # dof <= 0 marks a Dirichlet dof; perm[dof] <= 0 marks an excluded dof.
      # Shared dofs are simply written multiple times with the same value.
      if (dof > 0) && (perm[dof] > 0)
        node_coords[perm[dof]] = nodes[n][c]
      end
    end
  end
  return node_coords
end
|
||
# Interpolation matrices | ||
|
||
# Assemble the matrix of the interpolation `V_out ∋ v = Π(op(u))`, i.e. the
# DoFs of `V_out` applied to `op` of the basis of `U_in`. Entries shared by
# several cells are overwritten (take-last) instead of accumulated, which is
# the correct semantics for interpolation (all contributions coincide).
function interpolation_operator(op,U_in,V_out;
                                strat=SubAssembledRows(),
                                Tm=SparseMatrixCSR{0,PetscScalar,PetscInt},
                                Tv=Vector{PetscScalar})
  out_dofs = get_fe_dof_basis(V_out)
  in_basis = get_fe_basis(U_in)

  # Cell-local interpolation matrices: dofs of V_out applied to op(basis of U_in)
  cell_interp_mats = out_dofs(op(in_basis))
  local_contr = map(local_views(out_dofs),cell_interp_mats) do dofs, arr
    contr = DomainContribution()
    add_contribution!(contr,get_triangulation(dofs),arr)
    return contr
  end
  contr = GridapDistributed.DistributedDomainContribution(local_contr)

  matdata = collect_cell_matrix(U_in,V_out,contr)
  assem = SparseMatrixAssembler(Tm,Tv,U_in,V_out,strat)

  # Assemble with take-last (overwrite) instead of the usual add-assembly
  I = allocate_matrix(assem,matdata)
  takelast_matrix!(I,assem,matdata)
  return I
end
|
||
# Allocate and fill a sparse matrix from `matdata`, overwriting (rather than
# accumulating) entries touched by more than one cell.
function takelast_matrix(a::SparseMatrixAssembler,matdata)
  counter = Gridap.Algebra.nz_counter(get_matrix_builder(a),(get_rows(a),get_cols(a)))
  symbolic_loop_matrix!(counter,a,matdata)
  alloc = Gridap.Algebra.nz_allocation(counter)
  takelast_loop_matrix!(alloc,a,matdata)
  return Gridap.Algebra.create_from_nz(alloc)
end
|
||
# In-place variant of `takelast_matrix`: zero the stored entries of `mat`,
# then fill them from `matdata` with take-last semantics.
function takelast_matrix!(mat,a::SparseMatrixAssembler,matdata)
  z = zero(eltype(mat))
  LinearAlgebra.fillstored!(mat,z)
  return takelast_matrix_add!(mat,a,matdata)
end
|
||
# Fill the stored entries of `mat` from `matdata` (without zeroing first)
# and finalize the assembly.
function takelast_matrix_add!(mat,a::SparseMatrixAssembler,matdata)
  takelast_loop_matrix!(mat,a,matdata)
  return Gridap.Algebra.create_from_nz(mat)
end
|
||
# Distributed dispatch: apply the serial take-last loop to each local part.
function takelast_loop_matrix!(A,a::GridapDistributed.DistributedSparseMatrixAssembler,matdata)
  local_mats = local_views(A,get_rows(a),get_cols(a))
  map(takelast_loop_matrix!,local_mats,local_views(a),matdata)
end
|
||
# Serial take-last assembly loop: like Gridap's standard assembly loop, but
# the entries map is `(a,b) -> b`, so each cell contribution overwrites the
# matrix entry instead of adding to it.
function takelast_loop_matrix!(A,a::SparseMatrixAssembler,matdata)
  strategy = Gridap.FESpaces.get_assembly_strategy(a)
  for (cellmat,_cellidsrows,_cellidscols) in zip(matdata...)
    # Map cell ids through the assembly strategy (e.g. sub-assembled rows)
    cellidsrows = Gridap.FESpaces.map_cell_rows(strategy,_cellidsrows)
    cellidscols = Gridap.FESpaces.map_cell_cols(strategy,_cellidscols)
    @assert length(cellidscols) == length(cellidsrows)
    @assert length(cellmat) == length(cellidsrows)
    if length(cellmat) > 0
      # Build caches from the first cell, then run the hot loop behind a
      # function barrier (`_takelast_loop_matrix!`) for type stability
      rows_cache = array_cache(cellidsrows)
      cols_cache = array_cache(cellidscols)
      vals_cache = array_cache(cellmat)
      mat1 = getindex!(vals_cache,cellmat,1)
      rows1 = getindex!(rows_cache,cellidsrows,1)
      cols1 = getindex!(cols_cache,cellidscols,1)
      add! = Gridap.Arrays.AddEntriesMap((a,b) -> b) # overwrite instead of add
      add_cache = return_cache(add!,A,mat1,rows1,cols1)
      caches = add_cache, vals_cache, rows_cache, cols_cache
      _takelast_loop_matrix!(A,caches,cellmat,cellidsrows,cellidscols)
    end
  end
  A
end
|
||
# Function barrier for the hot assembly loop. The entries map `(a,b) -> b`
# overwrites each matrix entry with the incoming cell value ("take last")
# instead of accumulating contributions.
@noinline function _takelast_loop_matrix!(mat,caches,cell_vals,cell_rows,cell_cols)
  map_cache, vcache, rcache, ccache = caches
  overwrite! = Gridap.Arrays.AddEntriesMap((a,b) -> b)
  ncells = length(cell_cols)
  for cell in 1:ncells
    vals = getindex!(vcache,cell_vals,cell)
    rows = getindex!(rcache,cell_rows,cell)
    cols = getindex!(ccache,cell_cols,cell)
    evaluate!(map_cache,overwrite!,mat,vals,rows,cols)
  end
end
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.