diff --git a/examples/DMSTAG_Stokes_2D.jl b/examples/DMSTAG_Stokes_2D.jl index 6c787d15..49ca559b 100644 --- a/examples/DMSTAG_Stokes_2D.jl +++ b/examples/DMSTAG_Stokes_2D.jl @@ -1,7 +1,3 @@ -# EXCLUDE FROM TESTING -# NOTE: This is temporarily not working until we merge the DMSTAG routines with the new Clang branch -# -# # This shows how to solve the 2D incompressible Stokes equations using SNES solvers, # using a staggered grid discretization and a Velocity-Pressure formulation # @@ -23,14 +19,15 @@ using PETSc, MPI using ForwardDiff, SparseArrays, SparseDiffTools -PETSc.initialize() +petsclib = PETSc.petsclibs[1]; +PETSc.initialize(petsclib) -function FormRes!(f_g, x_g, user_ctx) +function FormRes!(ptr_fx_g, ptr_x_g, user_ctx) # Copy global to local vectors - PETSc.DMGlobalToLocal(user_ctx.dm, x_g, PETSc.INSERT_VALUES, user_ctx.x_l) - PETSc.DMGlobalToLocal(user_ctx.dm, f_g, PETSc.INSERT_VALUES, user_ctx.f_l) - + PETSc.update!(user_ctx.x_l, ptr_x_g, PETSc.INSERT_VALUES) + PETSc.update!(user_ctx.f_l, ptr_fx_g, PETSc.INSERT_VALUES) + # Retrieve arrays from the local vectors ArrayLocal_x = PETSc.DMStagVecGetArrayRead(user_ctx.dm, user_ctx.x_l); # array with all local x-data (solution array) ArrayLocal_f = PETSc.DMStagVecGetArray(user_ctx.dm, user_ctx.f_l); # array with all local residual @@ -43,8 +40,8 @@ function FormRes!(f_g, x_g, user_ctx) Base.finalize(ArrayLocal_f) # Copy local into global residual vector - PETSc.DMLocalToGlobal(user_ctx.dm,user_ctx.f_l, PETSc.INSERT_VALUES, f_g) - + PETSc.update!( ptr_fx_g, user_ctx.f_l, PETSc.INSERT_VALUES) + end function ForwardDiff_res(x, user_ctx) @@ -75,7 +72,7 @@ function ComputeSparsityPatternJacobian_automatic(x_l, user_ctx) return jac, colors end -function f(out, x, user_ctx) +function func(out, x, user_ctx) ArrayLocal_x = PETSc.DMStagVecGetArray(user_ctx.dm, x); # array with all local x-data (solition array) ArrayLocal_f = PETSc.DMStagVecGetArray(user_ctx.dm, out); # array with all local residual @@ -85,7 +82,7 @@ function f(out, x, user_ctx) return nothing end -function FormJacobian!(cx_g, J, P, user_ctx) +function FormJacobian!(ptr_x_g, J, P, user_ctx) # This requires several steps: # # 1) Extract local vector from global solution (x) vector @@ -94,11 +91,11 @@ function FormJacobian!(cx_g, J, P, user_ctx) # 3) Store the matrix inside the global Petsc matrix J # Extract the local vector - PETSc.DMGlobalToLocal(user_ctx.dm, cx_g, PETSc.INSERT_VALUES, user_ctx.x_l) + #PETSc.DMGlobalToLocal(user_ctx.dm, cx_g, PETSc.INSERT_VALUES, user_ctx.x_l) + PETSc.update!(user_ctx.x_l, ptr_x_g, PETSc.INSERT_VALUES) x = PETSc.unsafe_localarray(Float64, user_ctx.x_l.ptr; write=false, read=true); # Check the sparsity pattern - if isnothing(user_ctx.jac) error("You first have to define the sparsity pattern of the jacobian") else @@ -107,7 +104,7 @@ function FormJacobian!(cx_g, J, P, user_ctx) end out = similar(x); - f_Res = ((out,x)->f(out, x, user_ctx)); # pass additional arguments into the routine + f_Res = ((out,x)->func(out, x, user_ctx)); # pass additional arguments into the routine if isnothing(user_ctx.jac_cache) # Allocate data required for jacobian computations @@ -120,8 +117,13 @@ function FormJacobian!(cx_g, J, P, user_ctx) # Store global Jacobian inside Petsc Matrix ind = PETSc.LocalInGlobalIndices(user_ctx.dm); - J .= jac[ind,ind]; - + if PETSc.assembled(J) == false + println("Assembling J") + J = PETSc.MatSeqAIJ(sparse(jac[ind,ind])); + else + J .= sparse(jac[ind,ind]); + end + # Store Julia matrix and coloring user_ctx.jac = jac; user_ctx.colors 
= colors; @@ -180,10 +182,11 @@ function ComputeLocalResidual(dm, ArrayLocal_x, ArrayLocal_f, user_ctx) dz = user_ctx.dz; # Getting global indices for the local vectors - sx, sn = PETSc.DMStagGetCentralNodes(user_ctx.dm); #indices of central points + ind = PETSc.DMStagGetIndices(dm); # indices of (center/element) points, not including ghost values. + #sx, sn = PETSc.DMStagGetCentralNodes(user_ctx.dm); #indices of central points - ix = sx[1]:sn[1]; - iz = sx[2]:sn[2]; + ix = ind.center[1]; + iz = ind.center[1]; # Horizontal force balance f(x) @@ -231,14 +234,16 @@ function ComputeStresses!(user_ctx, ArrayLocal_x) EtaC = PETSc.DMStagGetGhostArrayLocationSlot(user_ctx.dmCoeff,user_ctx.coeff_l, PETSc.DMSTAG_DOWN_LEFT, 0); # Getting indices for center nodes (not ghost) - sx, sn = PETSc.DMStagGetCentralNodes(user_ctx.dm); #indices of central points - ix = sx[1]:sn[1]; - iz = sx[2]:sn[2]; - + ind = PETSc.DMStagGetIndices(user_ctx.dm); #indices of central points + #ix = sx[1]:sn[1]; + #iz = sx[2]:sn[2]; + ix = ind.center[1]; + iz = ind.center[1]; + # Compute shear stresses Txx = 2 .* EtaE[ix,iz] .* Exx; Tzz = 2 .* EtaE[ix,iz] .* Ezz; - Txz = 2 .* EtaC[sx[1]:sn[1]+1,sx[2]:sn[2]+1] .* Exz; + Txz = 2 .* EtaC[ix[1]:ix[end]+1,iz[1]:iz[end]+1] .* Exz; return Txx,Tzz,Txz @@ -258,9 +263,12 @@ function ComputeStrainRates!(user_ctx, ArrayLocal_x) dz = user_ctx.dz; # Getting indices for center nodes (not ghost) - sx, sn = PETSc.DMStagGetCentralNodes(user_ctx.dm); #indices of central points - ix = sx[1]:sn[1]; - iz = sx[2]:sn[2]; + ind = PETSc.DMStagGetIndices(user_ctx.dm); #indices of central points + #ix = sx[1]:sn[1]; + #iz = sx[2]:sn[2]; + ix = ind.center[1]; + iz = ind.center[2]; + # Set ghost points (free slip) Vx[:,iz[1]-1] .= Vx[:,iz[1]]; # ghost points on left and right size (here: free slip) @@ -272,24 +280,24 @@ function ComputeStrainRates!(user_ctx, ArrayLocal_x) DivV = (Vx[ix.+1,iz].-Vx[ix,iz])./dx .+ (Vz[ix,iz.+1].-Vz[ix,iz])./dz; Exx = (Vx[ix.+1,iz].-Vx[ix,iz])./dx .- 1/3 .* DivV; Ezz = (Vz[ix,iz.+1].-Vz[ix,iz])./dz .- 1/3 .* DivV; - Exz = 0.5.*((Vx[sx[1]:sn[1].+1,sx[2]:sn[2].+1].-Vx[sx[1]:sn[1].+1,sx[2].-1:sn[2]])./dz .+ - (Vz[sx[1]:sn[1].+1,sx[2]:sn[2].+1].-Vz[sx[1].-1:sn[1],sx[2]:sn[2].+1])./dx); + Exz = 0.5.*((Vx[ix[1]:ix[end].+1,iz[1]:iz[end].+1].-Vx[ix[1]:ix[end].+1,iz[1].-1:iz[end]])./dz .+ + (Vz[ix[1]:ix[end].+1,iz[1]:iz[end].+1].-Vz[ix[1].-1:ix[end],iz[1]:iz[end].+1])./dx); return Exx,Ezz,Exz end -function PopulateCoefficientData!(ctx) +function PopulateCoefficientData!(user_ctx) # Create coefficient local vector and array (to store eta and rho) - user_ctx.coeff_l = PETSc.DMCreateLocalVector(user_ctx.dmCoeff); + user_ctx.coeff_l = PETSc.createlocalvector(user_ctx.dmCoeff); coeff_array = PETSc.DMStagVecGetArray(user_ctx.dmCoeff,user_ctx.coeff_l); # Create Coordinates dm, local vector and array - dm_coord = PETSc.DMGetCoordinateDM(user_ctx.dmCoeff); - vec_coord = PETSc.DMGetCoordinatesLocal(user_ctx.dmCoeff); + dm_coord = PETSc.getcoordinateDM(user_ctx.dmCoeff); + vec_coord = PETSc.getcoordinateslocal(user_ctx.dmCoeff); coord = PETSc.DMStagVecGetArray(dm_coord, vec_coord); - start,n,nExtra = PETSc.DMStagGetCorners(user_ctx.dmCoeff); + # start,n,nExtra = PETSc.DMStagGetCorners(user_ctx.dmCoeff); # Get the correct entries for each of our variables in local element-wise storage iec = PETSc.DMStagGetLocationSlot(user_ctx.dmCoeff, PETSc.DMSTAG_DOWN_LEFT, 0); # location eta corner @@ -343,12 +351,16 @@ end function SetVecX!(user_ctx,x_g) # Set first guess for vector x to 0 user_ctx.x_l .= 0; - 
PETSc.DMLocalToGlobal(user_ctx.dm,user_ctx.x_l, PETSc.INSERT_VALUES, x_g); + #PETSc.DMLocalToGlobal(user_ctx.dm,user_ctx.x_l, PETSc.INSERT_VALUES, x_g); + PETSc.update!(x_g,user_ctx.x_l, PETSc.INSERT_VALUES); + end # Main Solver nx, nz = 32, 32; # number of nodes is x and z direction +#nx, nz = 8, 8; # number of nodes is x and z direction + user_ctx.xlim = [0,2]; # x and z dimensions user_ctx.zlim = [0,1]; xlim = user_ctx.xlim; @@ -363,7 +375,7 @@ user_ctx.gz = -1; # gravity magnitude user_ctx.kappa = 1e2; # incompressible penaly term # Create Solution and coefficient DMs -user_ctx.dm = PETSc.DMStagCreate2d(MPI.COMM_SELF,PETSc.DM_BOUNDARY_GHOSTED,PETSc.DM_BOUNDARY_GHOSTED,nx,nz,1,1,0,1,1,PETSc.DMSTAG_STENCIL_BOX,1); # V edge, P Element +user_ctx.dm = PETSc.DMStagCreate2d(petsclib,MPI.COMM_SELF,PETSc.DM_BOUNDARY_GHOSTED,PETSc.DM_BOUNDARY_GHOSTED,nx,nz,1,1,0,1,1,PETSc.DMSTAG_STENCIL_BOX,1); # V edge, P Element PETSc.DMStagSetUniformCoordinatesExplicit(user_ctx.dm, xlim[1], xlim[2], zlim[1], zlim[2]); # set coordinates user_ctx.dmCoeff = PETSc.DMStagCreateCompatibleDMStag(user_ctx.dm,2,0,1,0); # rho and eta on VERTEX, eta on ELEMENT PETSc.DMStagSetUniformCoordinatesExplicit(user_ctx.dmCoeff, xlim[1], xlim[2], zlim[1], zlim[2]); @@ -372,22 +384,27 @@ PETSc.DMStagSetUniformCoordinatesExplicit(user_ctx.dmCoeff, xlim[1], xlim[2], zl PopulateCoefficientData!(user_ctx); # Create solution and residual vectors -x_g = PETSc.DMCreateGlobalVector(user_ctx.dm); -f_g = PETSc.DMCreateGlobalVector(user_ctx.dm); -user_ctx.x_l = PETSc.DMCreateLocalVector(user_ctx.dm); -user_ctx.f_l = PETSc.DMCreateLocalVector(user_ctx.dm); +x_g = PETSc.createglobalvector(user_ctx.dm); +f_g = PETSc.createglobalvector(user_ctx.dm); +user_ctx.x_l = PETSc.createlocalvector(user_ctx.dm); +user_ctx.f_l = PETSc.createlocalvector(user_ctx.dm); # Create Petsc matrix for Jacobian -J = PETSc.DMCreateMatrix(user_ctx.dm); +J = PETSc.creatematrix(user_ctx.dm); # Set first guess values for solution vector SetVecX!(user_ctx,x_g); # Compute jacobian sparcity pattern -user_ctx.jac, user_ctx.colors = ComputeSparsityPatternJacobian_automatic(user_ctx.x_l.array, user_ctx); +x_l = PETSc.unsafe_localarray(Float64, user_ctx.x_l.ptr; write=false, read=true) + +user_ctx.jac, user_ctx.colors = ComputeSparsityPatternJacobian_automatic(user_ctx.x_l, user_ctx); +ind = PETSc.LocalInGlobalIndices(user_ctx.dm); # extract indices +J = PETSc.MatSeqAIJ(sparse(user_ctx.jac[ind,ind])); + #Setting up SNES -S = PETSc.SNES{Float64}(MPI.COMM_SELF; +S = PETSc.SNES{Float64}(petsclib,MPI.COMM_SELF; snes_rtol=1e-12, snes_monitor=true, snes_max_it = 500, @@ -403,7 +420,7 @@ PETSc.setjacobian!(S, FormJacobian!, J, J) PETSc.solve!(x_g, S); # Copy solution to local vector -PETSc.DMGlobalToLocal(user_ctx.dm, x_g, PETSc.INSERT_VALUES, user_ctx.x_l); +PETSc.update!(user_ctx.x_l, x_g, PETSc.INSERT_VALUES); # Extract solution Vx = PETSc.DMStagGetGhostArrayLocationSlot(user_ctx.dm, user_ctx.x_l, PETSc.DMSTAG_LEFT, 0); @@ -412,13 +429,13 @@ P = PETSc.DMStagGetGhostArrayLocationSlot(user_ctx.dm, user_ctx.x_l, PETSc Eta = PETSc.DMStagGetGhostArrayLocationSlot(user_ctx.dmCoeff, user_ctx.coeff_l, PETSc.DMSTAG_ELEMENT, 0); # Getting indices for center nodes (not ghost) -sx, sn = PETSc.DMStagGetCentralNodes(user_ctx.dm); -ix = sx[1]:sn[1]; -iz = sx[2]:sn[2]; +ind = PETSc.DMStagGetIndices(user_ctx.dm); +ix = ind.center[1]; +iz = ind.center[2]; # Getting rid of ghost points -Vx = Vx[sx[1]:sn[1]+1,iz]; -Vz = Vz[ix,sx[2]:sn[2]+1]; +Vx = Vx[ix[1]:ix[end]+1,iz]; +Vz = Vz[ix,iz[1]:iz[end]+1]; P 
= P[ix,iz]; Eta = Eta[ix,iz]; @@ -427,8 +444,8 @@ Vx_cen = (Vx[2:end,:] + Vx[1:end-1,:])/2; Vz_cen = (Vz[:,2:end] + Vz[:,1:end-1])/2; #get coordinates -dm_coord = PETSc.DMGetCoordinateDM(user_ctx.dmCoeff); -vec_coord = PETSc.DMGetCoordinatesLocal(user_ctx.dmCoeff); +dm_coord = PETSc.getcoordinateDM(user_ctx.dmCoeff); +vec_coord = PETSc.getcoordinateslocal(user_ctx.dmCoeff); XCoord_e = PETSc.DMStagGetGhostArrayLocationSlot(dm_coord, vec_coord, PETSc.DMSTAG_ELEMENT , 0); # location coord corner ZCoord_e = PETSc.DMStagGetGhostArrayLocationSlot(dm_coord, vec_coord, PETSc.DMSTAG_ELEMENT , 1); # location coord corner @@ -437,8 +454,8 @@ ZCoord_c = PETSc.DMStagGetGhostArrayLocationSlot(dm_coord, vec_coord, PETSc.DM XCoord_e = XCoord_e[ix,iz]; ZCoord_e = ZCoord_e[ix,iz]; -XCoord_c = XCoord_c[sx[1]:sn[1]+1,sx[2]:sn[2]+1]; -ZCoord_c = ZCoord_c[sx[1]:sn[1]+1,sx[2]:sn[2]+1]; +XCoord_c = XCoord_c[ix[1]:ix[end]+1,iz[1]:iz[end]+1]; +ZCoord_c = ZCoord_c[ix[1]:ix[end]+1,iz[1]:iz[end]+1]; xe_1D = XCoord_e[:,1]; ze_1D = ZCoord_e[1,:]; @@ -446,14 +463,14 @@ xc_1D = XCoord_c[:,1]; zc_1D = ZCoord_c[1,:]; # Plot -CreatePlots = true; # set to false by default such that testing does not require Plots +CreatePlots = false; # set to false by default such that testing does not require Plots if CreatePlots==true using Plots p1 = heatmap(xe_1D,ze_1D, P', xlabel="Width", ylabel="Depth", title="Pressure",aspect_ratio = 1) p2 = heatmap(xe_1D,zc_1D, Vz', xlabel="Width", ylabel="Depth", title="Vz",aspect_ratio = 1) p3 = heatmap(xc_1D,ze_1D, Vx', xlabel="Width", ylabel="Depth", title="Vx",aspect_ratio = 1) - #quiver!(XCoord_e[:],ZCoord_e[:],quiver=(Vx_cen[:]*0.02,Vz_cen[:]*0.02), color=:white,aspect_ratio = 1) + quiver!(XCoord_e[:],ZCoord_e[:],quiver=(Vx_cen[:]*0.02,Vz_cen[:]*0.02), color=:white,aspect_ratio = 1) p4 = heatmap(xe_1D,ze_1D, Eta', xlabel="Width", ylabel="Depth", title="Eta",aspect_ratio = 1) plot(p1, p2, p3, p4, layout = (2, 2), legend = false) diff --git a/examples/DMSTAG_porwave_1D.jl b/examples/DMSTAG_porwave_1D.jl index 518f8c4e..5303cb6d 100644 --- a/examples/DMSTAG_porwave_1D.jl +++ b/examples/DMSTAG_porwave_1D.jl @@ -1,7 +1,3 @@ -# EXCLUDE FROM TESTING -# NOTE: This is temporarily not working until we merge the DMSTAG routines with the new Clang branch -# -# # This is an example of a 1D viscoelastic porosity wave as described in # Vasyliev et al. Geophysical Research Letters (25), 17. p. 3239-3242 # https://agupubs.onlinelibrary.wiley.com/doi/pdf/10.1029/98GL52358 @@ -16,7 +12,8 @@ using PETSc, MPI using SparseArrays, SparseDiffTools, ForwardDiff -PETSc.initialize() +petsclib = PETSc.petsclibs[1]; +PETSc.initialize(petsclib) CreatePlots = false; if CreatePlots==true @@ -24,14 +21,14 @@ if CreatePlots==true end -function FormRes!(cfx_g, cx_g, user_ctx) +function FormRes!(ptr_fx_g, ptr_x_g, user_ctx) # Note that in PETSc, cx and cfx are pointers to global vectors. 
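# Illustration (hedged, standalone sketch -- not part of FormRes!): the calls
# below replace the former DMGlobalToLocal/DMLocalToGlobal wrappers with the
# new `PETSc.update!` API. A minimal usage pattern, assuming a DMStag `dm`,
# would be:
#
#     x_g = PETSc.createglobalvector(dm)            # global vector
#     x_l = PETSc.createlocalvector(dm)             # local vector (incl. ghost points)
#     PETSc.update!(x_l, x_g, PETSc.INSERT_VALUES)  # global -> local
#     # ... work on the local (ghosted) data ...
#     PETSc.update!(x_g, x_l, PETSc.INSERT_VALUES)  # local  -> global
#
# The same methods also accept the raw CVec pointers that PETSc hands to SNES
# callbacks, which is why ptr_x_g/ptr_fx_g can be passed directly below.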
# Copy global to local vectors - PETSc.DMGlobalToLocal(user_ctx.dm, cx_g, PETSc.INSERT_VALUES, user_ctx.x_l) - PETSc.DMGlobalToLocal(user_ctx.dm, cfx_g, PETSc.INSERT_VALUES, user_ctx.f_l) - + PETSc.update!(user_ctx.x_l, ptr_x_g, PETSc.INSERT_VALUES) + PETSc.update!(user_ctx.f_l, ptr_fx_g, PETSc.INSERT_VALUES) + # Retrieve arrays from the local vectors ArrayLocal_x = PETSc.DMStagVecGetArrayRead(user_ctx.dm, user_ctx.x_l); # array with all local x-data ArrayLocal_f = PETSc.DMStagVecGetArray(user_ctx.dm, user_ctx.f_l); # array with all local residual @@ -44,8 +41,8 @@ function FormRes!(cfx_g, cx_g, user_ctx) Base.finalize(ArrayLocal_f) # Copy local into global residual vector - PETSc.DMLocalToGlobal(user_ctx.dm,user_ctx.f_l, PETSc.INSERT_VALUES, cfx_g) - + PETSc.update!( ptr_fx_g, user_ctx.f_l, PETSc.INSERT_VALUES) + end function ForwardDiff_res(x, user_ctx) @@ -62,7 +59,8 @@ function ForwardDiff_res(x, user_ctx) return f; end -function f(out, x, user_ctx) + +function func(out, x, user_ctx) ArrayLocal_x = PETSc.DMStagVecGetArray(user_ctx.dm, x); # array with all local x-data ArrayLocal_f = PETSc.DMStagVecGetArray(user_ctx.dm, out); # array with all local residual @@ -74,18 +72,17 @@ end -function FormJacobian!(cx_g, J, P, user_ctx) +function FormJacobian!(ptr_x_g, J, P, user_ctx) # This requires several steps: # # 1) Extract local vector from global solution (x) vector # 2) Compute local jacobian from the residual routine (note that # this routine requires julia vectors as input) - # Extract the local vector - PETSc.DMGlobalToLocal(user_ctx.dm, cx_g, PETSc.INSERT_VALUES, user_ctx.x_l) + # Extract the local vector from pointer to global vector + PETSc.update!(user_ctx.x_l, ptr_x_g, PETSc.INSERT_VALUES) x = PETSc.unsafe_localarray(Float64, user_ctx.x_l.ptr; write=false, read=true) - if isnothing(user_ctx.jac) # Compute sparsity pattern of jacobian. This is relatvely slow, but only has to be done once. # Theoretically, more efficient tools for this exists (jacobian_sparsity in the SparsityDetection.jl package), @@ -100,20 +97,23 @@ function FormJacobian!(cx_g, J, P, user_ctx) else jac = user_ctx.jac; colors = user_ctx.colors; - end out = similar(x); - f_Res = ((out,x)->f(out, x, user_ctx)); # pass additional arguments into the routine + f_Res = ((out,x)->func(out, x, user_ctx)); # pass additional arguments into the routine forwarddiff_color_jacobian!(jac, f_Res, x, colorvec = colors) - ind = PETSc.LocalInGlobalIndices(user_ctx.dm); # extr - J .= jac[ind,ind]; + ind = PETSc.LocalInGlobalIndices(user_ctx.dm); # extract indices + if PETSc.assembled(J) == false + J = PETSc.MatSeqAIJ(sparse(jac[ind,ind])); + else + J .= sparse(jac[ind,ind]); + end user_ctx.jac = jac; user_ctx.colors = colors; - return jac, ind + return jac[ind,ind], ind end # Define a struct that holds data we need in the local SNES routines below @@ -149,22 +149,23 @@ function ComputeLocalResidual(dm, ArrayLocal_x, ArrayLocal_f, user_ctx) res_Pe = PETSc.DMStagGetGhostArrayLocationSlot(dm,ArrayLocal_f, PETSc.DMSTAG_ELEMENT, 1); # compute the FD stencil - ind = PETSc.DMStagGetCentralNodes(dm); # indices of (center/element) points, not including ghost values. - + ind = PETSc.DMStagGetIndices(dm); # indices of (center/element) points, not including ghost values. 
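# Sketch of what DMStagGetIndices returns (see the src/dmstag.jl changes further
# below in this diff): a named tuple of 1-based ranges that already exclude
# ghost points,
#
#     ind = PETSc.DMStagGetIndices(dm)
#     ind.center[1]   # (== ind.center.x) range of element/center points in x
#     ind.vertex[1]   # range of vertex points in x (one entry longer)
#     ind.center[2]   # y-range in 2D/3D, and so on
#
# Note for the 2D Stokes example earlier in this diff: iz should presumably be
# taken from ind.center[2]; using ind.center[1] for both ix and iz only
# coincides there because that example uses a square nx == nz grid.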
+ # Porosity residual @ center points - iz = ind.Center.lower[1]+1:ind.Center.upper[1]-1; # Phi is on center points - res_Phi[iz[1]-1] = Phi[iz[1]-1] - 1.0; # Bottom BC - res_Phi[iz[end]+1] = Phi[iz[end]+1] - 1.0; # Top BC - res_Phi[iz] = (Phi[iz] - Phi_old[iz])/dt + De.*(Pe[iz]-Pe_old[iz])/dt + (Phi[iz].^m) .* Pe[iz] + iz = ind.center[1]; # Phi is on center points + iz_c = iz[2:end-1]; # central points + res_Phi[iz[1]] = Phi[iz[1]] - 1.0; # Bottom BC + res_Phi[iz[end]] = Phi[iz[end]] - 1.0; # Top BC + res_Phi[iz_c] = (Phi[iz_c] - Phi_old[iz_c])/dt + De.*(Pe[iz_c]-Pe_old[iz_c])/dt + (Phi[iz_c].^m) .* Pe[iz_c] - # Pressure update @ nodal points - iz = ind.Vertex.lower[1]+1:ind.Vertex.upper[1]-1; - #iz = sx[1]+1:sn[1]-1; # Pe is on center points as well (dof=2) - res_Pe[iz[1]-1] = Pe[iz[1]-1] - 0.; # Bottom BC - res_Pe[iz[end]+1] = Pe[iz[end]+1] - 0.; # Top BC - res_Pe[iz] = De.*(Pe[iz]-Pe_old[iz])/dt - ( ((0.5*(Phi[iz .+ 1] + Phi[iz .+ 0])).^n) .* ( (Pe[iz .+ 1] - Pe[iz ])/dz .+ 1.0) - - ((0.5*(Phi[iz .- 1] + Phi[iz .+ 0])).^n) .* ( (Pe[iz ] - Pe[iz .- 1])/dz .+ 1.0))/dz + - (Phi[iz].^m) .* Pe[iz]; + # Pressure update @ center points + iz = ind.center[1]; + iz_c = iz[2:end-1]; + res_Pe[iz[1]] = Pe[iz[1]] - 0.; # Bottom BC + res_Pe[iz[end]] = Pe[iz[end]] - 0.; # Top BC + res_Pe[iz_c] = De.*(Pe[iz_c]-Pe_old[iz_c])/dt - ( ((0.5*(Phi[iz_c .+ 1] + Phi[iz_c .+ 0])).^n) .* ( (Pe[iz_c .+ 1] - Pe[iz_c ])/dz .+ 1.0) - + ((0.5*(Phi[iz_c .- 1] + Phi[iz_c .+ 0])).^n) .* ( (Pe[iz_c ] - Pe[iz_c .- 1])/dz .+ 1.0))/dz + + (Phi[iz_c].^m) .* Pe[iz_c]; # Cleanup Base.finalize(Phi); Base.finalize(Phi_old); @@ -177,8 +178,8 @@ function SetInitialPerturbations(user_ctx, x_g) # Computes the initial perturbations as in the paper # Retrieve coordinates from DMStag - DMcoord = PETSc.DMGetCoordinateDM(user_ctx.dm) - vec_coord = PETSc.DMGetCoordinatesLocal(user_ctx.dm); + DMcoord = PETSc.getcoordinateDM(user_ctx.dm) + vec_coord = PETSc.getcoordinateslocal(user_ctx.dm); Coord = PETSc.DMStagVecGetArray(DMcoord, vec_coord); Z_cen = PETSc.DMStagGetGhostArrayLocationSlot(user_ctx.dm,Coord, PETSc.DMSTAG_ELEMENT, 0); # center (has 1 extra) user_ctx.dz = Z_cen[2]-Z_cen[1]; @@ -199,7 +200,8 @@ function SetInitialPerturbations(user_ctx, x_g) Pe_old .= -dPe1 .*exp.( -((Z_cen .- z1).^2.0)/lambda^2) - dPe2.*exp.( -((Z_cen .- z2).^2.0)/lambda^2); # Copy local into global residual vector - PETSc.DMLocalToGlobal(user_ctx.dm,user_ctx.x_l, PETSc.INSERT_VALUES, x_g) + #PETSc.DMLocalToGlobal(user_ctx.dm,user_ctx.x_l, PETSc.INSERT_VALUES, x_g) + PETSc.update!(x_g, user_ctx.x_l, PETSc.INSERT_VALUES); # send back coordinates return Z_cen @@ -214,27 +216,29 @@ L = 150; user_ctx.De = 1e2; user_ctx.dt = 1e-2; # Note that the timestep has to be tuned a bit depending on De in order to obtain convergence -user_ctx.dm = PETSc.DMStagCreate1d(MPI.COMM_SELF,PETSc.DM_BOUNDARY_NONE,nx,0,2); # both Phi and Pe are on center points +user_ctx.dm = PETSc.DMStagCreate1d(petsclib, MPI.COMM_WORLD,PETSc.DM_BOUNDARY_NONE,nx,0,2); # both Phi and Pe are on center points PETSc.DMStagSetUniformCoordinatesExplicit(user_ctx.dm, -20, L) # set coordinates -x_g = PETSc.DMCreateGlobalVector(user_ctx.dm) +x_g = PETSc.createglobalvector(user_ctx.dm) -f_g = PETSc.DMCreateGlobalVector(user_ctx.dm) -user_ctx.x_l = PETSc.DMCreateLocalVector(user_ctx.dm) -user_ctx.xold_l = PETSc.DMCreateLocalVector(user_ctx.dm) -user_ctx.xold_g = PETSc.DMCreateGlobalVector(user_ctx.dm) -user_ctx.f_l = PETSc.DMCreateLocalVector(user_ctx.dm) -J = PETSc.DMCreateMatrix(user_ctx.dm); # Jacobian from 
DMStag +f_g = PETSc.createglobalvector(user_ctx.dm) +user_ctx.x_l = PETSc.createlocalvector(user_ctx.dm) +user_ctx.xold_l = PETSc.createlocalvector(user_ctx.dm) +user_ctx.xold_g = PETSc.createglobalvector(user_ctx.dm) +user_ctx.f_l = PETSc.createlocalvector(user_ctx.dm) +J = PETSc.creatematrix(user_ctx.dm); # Jacobian from DMStag # initial non-zero structure of jacobian Z_cen = SetInitialPerturbations(user_ctx, x_g) -x0 = PETSc.VecSeq(rand(size(x_g,1))); -J_julia,ind = FormJacobian!(x0.ptr, J, J, user_ctx) +x0 = PETSc.createglobalvector(user_ctx.dm); +x0[1:length(x0)] .= 1 +J_julia,ind = FormJacobian!(x0, J, J, user_ctx) +J = PETSc.MatSeqAIJ(J_julia) -S = PETSc.SNES{Float64}(MPI.COMM_SELF; +S = PETSc.SNES{Float64}(petsclib, MPI.COMM_SELF; snes_rtol=1e-12, snes_monitor=true, snes_max_it = 500, @@ -269,14 +273,14 @@ while time<2.5 # Update old local values user_ctx.xold_g = x_g; - PETSc.DMGlobalToLocal(user_ctx.dm, x_g, PETSc.INSERT_VALUES, user_ctx.x_l) - PETSc.DMGlobalToLocal(user_ctx.dm, user_ctx.xold_g, PETSc.INSERT_VALUES, user_ctx.xold_l) + PETSc.update!(user_ctx.x_l, x_g, PETSc.INSERT_VALUES, ) + PETSc.update!(user_ctx.xold_l, user_ctx.xold_g, PETSc.INSERT_VALUES, ) # Update time time += user_ctx.dt; it += 1; - if (mod(it,200)==0) & (CreatePlots==true) # Visualisation + if (mod(it,20)==0) & (CreatePlots==true) # Visualisation # Extract values and plot Phi = PETSc.DMStagGetGhostArrayLocationSlot(user_ctx.dm,user_ctx.x_l, PETSc.DMSTAG_ELEMENT, 0); Pe = PETSc.DMStagGetGhostArrayLocationSlot(user_ctx.dm,user_ctx.x_l, PETSc.DMSTAG_ELEMENT, 1); diff --git a/examples/DMSTAG_porwave_2D.jl b/examples/DMSTAG_porwave_2D.jl index 6492a7cb..c626b9c9 100644 --- a/examples/DMSTAG_porwave_2D.jl +++ b/examples/DMSTAG_porwave_2D.jl @@ -1,6 +1,5 @@ # EXCLUDE FROM TESTING -# NOTE: This is temporarily not working until we merge the DMSTAG routines with the new Clang branch -# +# Note: this is excluded as it would slow doen the tests too much # # This is an example of a 2D viscoelastic porosity wave as described in # Vasyliev et al. Geophysical Research Letters (25), 17. p. 3239-3242 @@ -15,17 +14,17 @@ using PETSc, MPI using Plots using SparseArrays, SparseDiffTools, ForwardDiff -PETSc.initialize() - +petsclib = PETSc.petsclibs[1]; +PETSc.initialize(petsclib) -function FormRes!(cfx_g, cx_g, user_ctx) +function FormRes!(ptr_fx_g, ptr_x_g, user_ctx) # Note that in PETSc, cx and cfx are pointers to global vectors. 
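# Access pattern used throughout these examples (hedged sketch):
# DMStagVecGetArray returns the full local (ghosted) data block of a vector,
# and DMStagGetGhostArrayLocationSlot selects a view on one DOF at one grid
# location, e.g. for the two element-based unknowns of this example:
#
#     A   = PETSc.DMStagVecGetArray(dm, x_l)
#     Phi = PETSc.DMStagGetGhostArrayLocationSlot(dm, A, PETSc.DMSTAG_ELEMENT, 0)
#     Pe  = PETSc.DMStagGetGhostArrayLocationSlot(dm, A, PETSc.DMSTAG_ELEMENT, 1)
#
# Since these are views, writing into Phi/Pe (or into the residual slots) is
# reflected in the underlying local vector, so no explicit copy-back is needed
# before the final update! call.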
# Copy global to local vectors - PETSc.DMGlobalToLocal(user_ctx.dm, cx_g, PETSc.INSERT_VALUES, user_ctx.x_l) - PETSc.DMGlobalToLocal(user_ctx.dm, cfx_g, PETSc.INSERT_VALUES, user_ctx.f_l) - + PETSc.update!(user_ctx.x_l, ptr_x_g, PETSc.INSERT_VALUES) + PETSc.update!(user_ctx.f_l, ptr_fx_g, PETSc.INSERT_VALUES) + # Retrieve arrays from the local vectors ArrayLocal_x = PETSc.DMStagVecGetArrayRead(user_ctx.dm, user_ctx.x_l); # array with all local x-data ArrayLocal_f = PETSc.DMStagVecGetArray(user_ctx.dm, user_ctx.f_l); # array with all local residual @@ -38,7 +37,8 @@ function FormRes!(cfx_g, cx_g, user_ctx) Base.finalize(ArrayLocal_f) # Copy local into global residual vector - PETSc.DMLocalToGlobal(user_ctx.dm,user_ctx.f_l, PETSc.INSERT_VALUES, cfx_g) + PETSc.update!(ptr_fx_g, user_ctx.f_l, PETSc.INSERT_VALUES) + #PETSc.DMLocalToGlobal(user_ctx.dm,user_ctx.f_l, PETSc.INSERT_VALUES, cfx_g) end @@ -56,7 +56,7 @@ function ForwardDiff_res(x, user_ctx) return f; end -function f(out, x, user_ctx) +function func(out, x, user_ctx) ArrayLocal_x = PETSc.DMStagVecGetArray(user_ctx.dm, x); # array with all local x-data ArrayLocal_f = PETSc.DMStagVecGetArray(user_ctx.dm, out); # array with all local residual @@ -67,7 +67,7 @@ function f(out, x, user_ctx) end -function FormJacobian!(cx_g, J, P, user_ctx) +function FormJacobian!(ptr_x_g, J, P, user_ctx) # This requires several steps: # # 1) Extract local vector from global solution (x) vector @@ -75,30 +75,37 @@ function FormJacobian!(cx_g, J, P, user_ctx) # this routine requires julia vectors as input) # Extract the local vector - PETSc.DMGlobalToLocal(user_ctx.dm, cx_g, PETSc.INSERT_VALUES, user_ctx.x_l) + PETSc.update!(user_ctx.x_l, ptr_x_g, PETSc.INSERT_VALUES) x = PETSc.unsafe_localarray(Float64, user_ctx.x_l.ptr; write=false, read=true) if isnothing(user_ctx.jac) - error("You first have to define the sparsity pattern of the jacobian") + # Compute sparsity pattern of jacobian. This is relatvely slow, but only has to be done once. + # Theoretically, more efficient tools for this exists (jacobian_sparsity in the SparsityDetection.jl package), + # but they don't seem to work with the PETSc approach we use. 
Therefore we employ + f_Residual = (x -> ForwardDiff_res(x, user_ctx)); # pass additional arguments into the routine + J_julia = ForwardDiff.jacobian(f_Residual,x); + + # employ sparse structure to compute jacobian - to be moved inside routines + jac = sparse(J_julia); + colors = matrix_colors(jac) # the number of nonzeros per row + else jac = user_ctx.jac; colors = user_ctx.colors; end out = similar(x); - f_Res = ((out,x)->f(out, x, user_ctx)); # pass additional arguments into the routine + f_Res = ((out,x)->func(out, x, user_ctx)); # pass additional arguments into the routine + forwarddiff_color_jacobian!(jac, f_Res, x, colorvec = colors) - if isnothing(user_ctx.jac_cache) - # Allocate data required for jacobian computations - user_ctx.jac_cache = ForwardColorJacCache(f_Res,x; colorvec=colors, sparsity = jac); + ind = PETSc.LocalInGlobalIndices(user_ctx.dm); # extract indices + if PETSc.assembled(J) == false + println("Assembling J") + J = PETSc.MatSeqAIJ(sparse(jac[ind,ind])); + else + J .= sparse(jac[ind,ind]); end - @time forwarddiff_color_jacobian!(jac, f_Res, x, user_ctx.jac_cache); - - ind = PETSc.LocalInGlobalIndices(user_ctx.dm); # extr - J .= jac[ind,ind]; - - user_ctx.jac = jac; user_ctx.colors = colors; @@ -123,12 +130,13 @@ mutable struct Data_PorWav2D end user_ctx = Data_PorWav2D(nothing, nothing, nothing, nothing, nothing, nothing, nothing, nothing, nothing, nothing, nothing, nothing); # holds data we need in the local -function ComputeSparsityPatternJacobian(x_l, user_ctx) +function ComputeSparsityPatternJacobian(x_l_vec, user_ctx) # This computes the sparsity pattern of our jacobian by hand. # That is signficantly faster than the automatic method, yet you will have to analyze your residual routine for it. # # As you will see, there are however any similaries between the two routines, so it is usually not all that difficult # to define this + x_l = PETSc.unsafe_localarray(Float64, x_l_vec.ptr; write=false, read=true) AddInd!(i, array) = append!(i,vec(array)); # small helper routine for readability of the lines below @@ -144,11 +152,11 @@ function ComputeSparsityPatternJacobian(x_l, user_ctx) res_Pe = PETSc.DMStagGetGhostArrayLocationSlot(user_ctx.dm,ArrayLocal_f, PETSc.DMSTAG_ELEMENT, 1); # compute the FD stencil - sx, sn = PETSc.DMStagGetCentralNodes(user_ctx.dm); # indices of (center/element) points, not including ghost values. + ind = PETSc.DMStagGetIndices(user_ctx.dm); # indices of (center/element) points, not including ghost values. 
# Porosity residual @ center points - ix = sx[1]:sn[1]; - iz = sx[2]:sn[2]; + ix = ind.center[1]; + iz = ind.center[2]; #res_Phi[ix,iz] = (Phi[ix,iz] - Phi_old[ix,iz])/dt + De.*(Pe[ix,iz]-Pe_old[ix,iz])/dt + (Phi[ix,iz].^m) .* Pe[ix,iz] i,j = Vector{Int64}(), Vector{Int64}() @@ -156,14 +164,18 @@ function ComputeSparsityPatternJacobian(x_l, user_ctx) AddInd!(i,res_Phi[ix,iz]), AddInd!(j,Pe[ix,iz]); # new non-zeros because of (Phi[ix,iz].^m) .* Pe[ix,iz] term # Pressure update @ nodal points - ix = sx[1]:sn[1]; # lateral BC's are set using ghost points - iz = sx[2]+1:sn[2]-1; # constant Pe on top and bottom, so this is only for center points + ix = ind.center[1]; # lateral BC's are set using ghost points + iz = ind.center[2]; # constant Pe on top and bottom, so this is only for center points + ix_c = ix[1:end]; + iz_c = iz[2:end-1]; + + Pe[ix[1]-1,:] = Pe[ix[1],:]; # ghost points on left and right size (here: flux free) Pe[ix[end]+1,:] = Pe[ix[end],:]; # BC's - AddInd!(i, res_Pe[:,iz[1]-1]), AddInd!(j,Pe[:,iz[1]-1]); # bottom BC - AddInd!(i, res_Pe[:,iz[end]+1]), AddInd!(j,Pe[:,iz[end]+1]); # top BC + AddInd!(i, res_Pe[:,iz[1]]), AddInd!(j,Pe[:,iz[1]]); # bottom BC + AddInd!(i, res_Pe[:,iz[end]]), AddInd!(j,Pe[:,iz[end]]); # top BC # res_Pe[ix,iz] = De*( Pe[ix,iz] - Pe_old[ix,iz])/dt + @@ -172,16 +184,16 @@ function ComputeSparsityPatternJacobian(x_l, user_ctx) # ((0.5.*(Phi[ix,iz .- 1] + Phi[ix ,iz ])).^n) .* ( (Pe[ix,iz ] - Pe[ix, iz .- 1])/dz .+ 1.0))/dz) # - (( ((0.5.*(Phi[ix,iz ] + Phi[ix.+ 1,iz ])).^n) .* ( (Pe[ix.+ 1,iz ] - Pe[ix, iz ])/dx ) - # ((0.5.*(Phi[ix,iz ] + Phi[ix.- 1,iz ])).^n) .* ( (Pe[ix ,iz ] - Pe[ix.- 1,iz ])/dx ))/dx) - AddInd!(i, res_Pe[ix,iz]), AddInd!(j,Phi[ix,iz]); # new part of (Phi[ix,iz].^m).* Pe[ix,iz] - AddInd!(i, res_Pe[ix,iz]), AddInd!(j,Phi[ix,iz .+ 1 ]); # Phi[ix,iz .+ 1] - AddInd!(i, res_Pe[ix,iz]), AddInd!(j,Phi[ix,iz .- 1 ]); # - AddInd!(i, res_Pe[ix,iz]), AddInd!(j,Phi[ix .- 1, iz]); # - AddInd!(i, res_Pe[ix,iz]), AddInd!(j,Phi[ix .+ 1, iz]); # - AddInd!(i, res_Pe[ix,iz]), AddInd!(j, Pe[ix,iz] ); # De*( Pe[ix,iz] - Pe_old[ix,iz])/dt - AddInd!(i, res_Pe[ix,iz]), AddInd!(j, Pe[ix .+ 1, iz]); # Pe[ix.+ 1,iz ] - AddInd!(i, res_Pe[ix,iz]), AddInd!(j, Pe[ix .- 1, iz]); # Pe[ix.- 1,iz ] - AddInd!(i, res_Pe[ix,iz]), AddInd!(j, Pe[ix, iz .- 1]); # Pe[ix ,iz - 1] - AddInd!(i, res_Pe[ix,iz]), AddInd!(j, Pe[ix, iz .+ 1]); # Pe[ix ,iz + 1] + AddInd!(i, res_Pe[ix_c,iz_c]), AddInd!(j,Phi[ix_c,iz_c]); # new part of (Phi[ix,iz].^m).* Pe[ix,iz] + AddInd!(i, res_Pe[ix_c,iz_c]), AddInd!(j,Phi[ix_c,iz_c .+ 1 ]); # Phi[ix,iz .+ 1] + AddInd!(i, res_Pe[ix_c,iz_c]), AddInd!(j,Phi[ix_c,iz_c .- 1 ]); # + AddInd!(i, res_Pe[ix_c,iz_c]), AddInd!(j,Phi[ix_c .- 1, iz_c]); # + AddInd!(i, res_Pe[ix_c,iz_c]), AddInd!(j,Phi[ix_c .+ 1, iz_c]); # + AddInd!(i, res_Pe[ix_c,iz_c]), AddInd!(j, Pe[ix_c,iz_c] ); # De*( Pe[ix,iz] - Pe_old[ix,iz])/dt + AddInd!(i, res_Pe[ix_c,iz_c]), AddInd!(j, Pe[ix_c .+ 1, iz_c]); # Pe[ix.+ 1,iz ] + AddInd!(i, res_Pe[ix_c,iz_c]), AddInd!(j, Pe[ix_c .- 1, iz_c]); # Pe[ix.- 1,iz ] + AddInd!(i, res_Pe[ix_c,iz_c]), AddInd!(j, Pe[ix_c, iz_c .- 1]); # Pe[ix ,iz - 1] + AddInd!(i, res_Pe[ix_c,iz_c]), AddInd!(j, Pe[ix_c, iz_c .+ 1]); # Pe[ix ,iz + 1] jac = sparse(i,j,ones(Float64,length(i[:])),n,n); # create the sparse jacobian colors = matrix_colors(jac) # the number of nonzeros per row @@ -221,28 +233,31 @@ function ComputeLocalResidual(dm, ArrayLocal_x, ArrayLocal_f, user_ctx) res_Pe = PETSc.DMStagGetGhostArrayLocationSlot(dm,ArrayLocal_f, PETSc.DMSTAG_ELEMENT, 1); # 
compute the FD stencil - sx, sn = PETSc.DMStagGetCentralNodes(dm); # indices of (center/element) points, not including ghost values. + ind = PETSc.DMStagGetIndices(dm); # indices of (center/element) points, not including ghost values. # Porosity residual @ center points - ix = sx[1]:sn[1]; - iz = sx[2]:sn[2]; - res_Phi[ix,iz] = (Phi[ix,iz] - Phi_old[ix,iz])/dt + De.*(Pe[ix,iz]-Pe_old[ix,iz])/dt + (Phi[ix,iz].^m) .* Pe[ix,iz] - - # Pressure update @ nodal points - ix = sx[1]:sn[1]; # lateral BC's are set using ghost points - iz = sx[2]+1:sn[2]-1; # constant Pe on top and bottom, so this is only for center points + ix = ind.center[1]; + iz = ind.center[2]; + ix_c = ix[1:end]; + iz_c = iz[1:end]; + res_Phi[ix_c,iz_c] = (Phi[ix_c,iz_c] - Phi_old[ix_c,iz_c])/dt + De.*(Pe[ix_c,iz_c]-Pe_old[ix_c,iz_c])/dt + (Phi[ix_c,iz_c].^m) .* Pe[ix_c,iz_c] + + # Pressure update @ center points + ix = ind.center[1]; # lateral BC's are set using ghost points + iz = ind.center[2]; # constant Pe on top and bottom, so this is only for center points + ix_c = ix[1:end]; + iz_c = iz[2:end-1]; Pe[ix[1]-1,:] = Pe[ix[1],:]; # ghost points on left and right size (here: flux free) - Pe[ix[end]+1,:] = Pe[ix[end],:]; - res_Pe[:,iz[1]-1] = Pe[:,iz[1]-1] .- 0.; # bottom BC - res_Pe[:,iz[end]+1] = Pe[:,iz[end]+1] .- 0.; # top BC - - - res_Pe[ix,iz] = De*( Pe[ix,iz] - Pe_old[ix,iz])/dt + - (Phi[ix,iz].^m).* Pe[ix,iz] - - (( ((0.5.*(Phi[ix,iz .+ 1] + Phi[ix ,iz ])).^n) .* ( (Pe[ix,iz .+ 1] - Pe[ix, iz ])/dz .+ 1.0) - - ((0.5.*(Phi[ix,iz .- 1] + Phi[ix ,iz ])).^n) .* ( (Pe[ix,iz ] - Pe[ix, iz .- 1])/dz .+ 1.0))/dz) - - (( ((0.5.*(Phi[ix,iz ] + Phi[ix.+ 1,iz ])).^n) .* ( (Pe[ix.+ 1,iz ] - Pe[ix, iz ])/dx ) - - ((0.5.*(Phi[ix,iz ] + Phi[ix.- 1,iz ])).^n) .* ( (Pe[ix ,iz ] - Pe[ix.- 1,iz ])/dx ))/dx) + Pe[ix[end]+1 ,:] = Pe[ix[end],:]; + res_Pe[:,iz[1]] = Pe[:,iz[1]] .- 0.; # bottom BC + res_Pe[:,iz[end]] = Pe[:,iz[end]] .- 0.; # top BC + + res_Pe[ix_c,iz_c] = De*( Pe[ix_c,iz_c] - Pe_old[ix_c,iz_c])/dt + + (Phi[ix_c,iz_c].^m).* Pe[ix_c,iz_c] - + (( ((0.5.*(Phi[ix_c,iz_c .+ 1] + Phi[ix_c ,iz_c ])).^n) .* ( (Pe[ix_c,iz_c .+ 1] - Pe[ix_c, iz_c ])/dz .+ 1.0) - + ((0.5.*(Phi[ix_c,iz_c .- 1] + Phi[ix_c ,iz_c ])).^n) .* ( (Pe[ix_c,iz_c ] - Pe[ix_c, iz_c .- 1])/dz .+ 1.0))/dz); # - + (( ((0.5.*(Phi[ix_c,iz_c ] + Phi[ix_c.+ 1,iz_c ])).^n) .* ( (Pe[ix_c.+ 1,iz_c ] - Pe[ix_c, iz_c ])/dx ) - + ((0.5.*(Phi[ix_c,iz_c ] + Phi[ix_c.- 1,iz_c ])).^n) .* ( (Pe[ix_c ,iz_c ] - Pe[ix_c.- 1,iz_c ])/dx ))/dx) # remark to the lines above: these are long, multi line, expressions. For code readability, I chopped them up over several lines. 
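# Consolidated sketch of the Jacobian workflow used in FormJacobian! above
# (hedged; assumes `user_ctx`, `func`, a local array `x` and a PETSc matrix `J`
# as defined in this example):

using SparseArrays, SparseDiffTools

jac    = user_ctx.jac                                   # sparsity pattern (SparseMatrixCSC)
colors = user_ctx.colors                                # coloring from matrix_colors(jac)
f_Res  = (out, x) -> func(out, x, user_ctx)             # residual closure
forwarddiff_color_jacobian!(jac, f_Res, x, colorvec = colors)

ind = PETSc.LocalInGlobalIndices(user_ctx.dm)           # map local (ghosted) dofs -> global dofs
if !PETSc.assembled(J)
    J = PETSc.MatSeqAIJ(sparse(jac[ind, ind]))          # first call: create matrix with this pattern
else
    J .= sparse(jac[ind, ind])                          # subsequent calls: overwrite the values
end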
@@ -260,8 +275,8 @@ function SetInitialPerturbations(user_ctx, x_g) # Computes the initial perturbations as in the paper # Retrieve coordinates from DMStag - DMcoord = PETSc.DMGetCoordinateDM(user_ctx.dm) - vec_coord = PETSc.DMGetCoordinatesLocal(user_ctx.dm); + DMcoord = PETSc.getcoordinateDM(user_ctx.dm) + vec_coord = PETSc.getcoordinateslocal(user_ctx.dm); Coord = PETSc.DMStagVecGetArray(DMcoord, vec_coord); X_cen = Coord[:,:,1]; Z_cen = Coord[:,:,2]; @@ -275,7 +290,6 @@ function SetInitialPerturbations(user_ctx, x_g) Phi_old = PETSc.DMStagGetGhostArrayLocationSlot(user_ctx.dm,user_ctx.xold_l, PETSc.DMSTAG_ELEMENT, 0); Pe_old = PETSc.DMStagGetGhostArrayLocationSlot(user_ctx.dm,user_ctx.xold_l, PETSc.DMSTAG_ELEMENT, 1); - Phi0 =1.0; dPhi1=8.0; dPhi2=1.0; z1=0.0; z2=40.0; x1=0.0; x2=0.0; lambda=1.0 dPe1 = dPhi1/user_ctx.De; dPe2 = dPhi2/user_ctx.De; @@ -287,7 +301,7 @@ function SetInitialPerturbations(user_ctx, x_g) Pe_old .= -dPe1 .*exp.( -((Z_cen .- z1).^2.0)/lambda^2) - dPe2.*exp.( -((Z_cen .- z2).^2.0)/lambda^2); # Copy local into global residual vector - PETSc.DMLocalToGlobal(user_ctx.dm,user_ctx.x_l, PETSc.INSERT_VALUES, x_g) + PETSc.update!(x_g, user_ctx.x_l, PETSc.INSERT_VALUES); # send back coordinates (mainly for plotting) return X_cen, Z_cen @@ -296,25 +310,32 @@ end # Main solver nx, nz = 100, 500; +#nx, nz = 6, 5; W, L = 100,150; -user_ctx.De = 1e-1; # Deborah number -user_ctx.dt = 1e-5; # Note that the timestep has to be tuned a bit depending on the +#user_ctx.De = 1e-1; # Deborah number +#user_ctx.dt = 1e-5; # Note that the timestep has to be tuned a bit depending on the + +user_ctx.De = 1e2; +user_ctx.dt = 1e-2; # Note that the timestep has to be tuned a bit depending on De in order to obtain convergence # Both Pe and Phi are @ defined centers -user_ctx.dm = PETSc.DMStagCreate2d(MPI.COMM_SELF,PETSc.DM_BOUNDARY_GHOSTED,PETSc.DM_BOUNDARY_NONE,nx,nz,1,1,0,0,2,PETSc.DMSTAG_STENCIL_BOX,1) +user_ctx.dm = PETSc.DMStagCreate2d(petsclib,MPI.COMM_WORLD, + PETSc.DM_BOUNDARY_GHOSTED, + PETSc.DM_BOUNDARY_NONE, + nx,nz,1,1,0,0,2,PETSc.DMSTAG_STENCIL_BOX,1) PETSc.DMStagSetUniformCoordinatesExplicit(user_ctx.dm, -W/2, W/2, -20, L) # set coordinates -x_g = PETSc.DMCreateGlobalVector(user_ctx.dm) +x_g = PETSc.createglobalvector(user_ctx.dm) -f_g = PETSc.DMCreateGlobalVector(user_ctx.dm) -user_ctx.x_l = PETSc.DMCreateLocalVector(user_ctx.dm) -user_ctx.xold_l = PETSc.DMCreateLocalVector(user_ctx.dm) -user_ctx.xold_g = PETSc.DMCreateGlobalVector(user_ctx.dm) -user_ctx.f_l = PETSc.DMCreateLocalVector(user_ctx.dm) -J = PETSc.DMCreateMatrix(user_ctx.dm); # Jacobian from DMStag +f_g = PETSc.createglobalvector(user_ctx.dm) +user_ctx.x_l = PETSc.createlocalvector(user_ctx.dm) +user_ctx.xold_l = PETSc.createlocalvector(user_ctx.dm) +user_ctx.xold_g = PETSc.createglobalvector(user_ctx.dm) +user_ctx.f_l = PETSc.createlocalvector(user_ctx.dm) +J = PETSc.creatematrix(user_ctx.dm); # Jacobian from DMStag # initial non-zero structure of jacobian @@ -333,13 +354,18 @@ heatmap(xc_1D,zc_1D,Pe_pl', xlabel="Width", ylabel="Depth", title="Pe") # Compute sparsity & coloring # do this in case you solve new equations: -#@time user_ctx.jac, user_ctx.colors = ComputeSparsityPatternJacobian_automatic(user_ctx.x_l.array, user_ctx); +#@time user_ctx.jac, user_ctx.colors = ComputeSparsityPatternJacobian_automatic(user_ctx.x_l, user_ctx); # much faster, but requires reformulation in case you solve new equations: -@time user_ctx.jac, user_ctx.colors = ComputeSparsityPatternJacobian(user_ctx.x_l.array, user_ctx); +@time 
user_ctx.jac, user_ctx.colors = ComputeSparsityPatternJacobian(user_ctx.x_l, user_ctx); +ind = PETSc.LocalInGlobalIndices(user_ctx.dm); # extract indices +J = PETSc.MatSeqAIJ(sparse(user_ctx.jac[ind,ind])); -S = PETSc.SNES{Float64}(MPI.COMM_SELF; +# + + +S = PETSc.SNES{Float64}(petsclib,MPI.COMM_WORLD; snes_rtol=1e-12, snes_monitor=true, snes_max_it = 500, @@ -356,8 +382,6 @@ if true PETSc.setfunction!(S, FormRes!, f_g) PETSc.setjacobian!(S, FormJacobian!, J, J) - - # Preparation of visualisation ENV["GKSwstype"]="nul"; if isdir("viz2D_out")==true @@ -377,8 +401,8 @@ while t < 25.0 # Update old local values user_ctx.xold_g = x_g; - PETSc.DMGlobalToLocal(user_ctx.dm, x_g, PETSc.INSERT_VALUES, user_ctx.x_l) - PETSc.DMGlobalToLocal(user_ctx.dm, user_ctx.xold_g, PETSc.INSERT_VALUES, user_ctx.xold_l) + PETSc.update!(user_ctx.x_l, x_g, PETSc.INSERT_VALUES) + PETSc.update!(user_ctx.xold_l, user_ctx.xold_g, PETSc.INSERT_VALUES) # Update time t += user_ctx.dt; diff --git a/examples/SNES_ex2.jl b/examples/SNES_ex2.jl index 892f8f3f..ee105c57 100644 --- a/examples/SNES_ex2.jl +++ b/examples/SNES_ex2.jl @@ -1,7 +1,7 @@ # This implements src/snes/examples/tutorials/ex2.c from PETSc using the PETSc.jl package, using SNES # -# Note that yhe PETSc.jl package does currently not support MPI-parallel cases (even when PETSC_jll does support it) -# +# This solves the equations sequentially +# # Newton method to solve u'' + u^{2} = f, sequentially. using PETSc, MPI, LinearAlgebra, SparseArrays, UnicodePlots @@ -10,20 +10,21 @@ if ~MPI.Initialized() MPI.Init() end -PETSc.initialize() +petsclib = PETSc.petsclibs[1] +PETSc.initialize(petsclib) -``` +""" Computes initial guess -``` +""" function FormInitialGuess!(x) for i=1:length(x) x[i] = 0.50; end end -``` +""" Computes rhs forcing function -``` +""" function SetInitialArrays(n) h = 1.0/(n-1.0) F = zeros(n); @@ -37,48 +38,54 @@ function SetInitialArrays(n) return F end -``` +""" Computes the residual f, given solution vector x -``` +""" function FormResidual!(cf,cx, args...) - if typeof(cx) <: Vector{Float64} - x = cx; + if typeof(cx) <: Ptr{Nothing} + # When this routine is called from PETSc, cx is a pointer to a global vector + # That's why we have to transfer it first to + x = PETSc.unsafe_localarray(PETSc.scalartype(petsclib),cx) else - x = PETSc.unsafe_localarray(Float64,cx) + x = cx; end - if typeof(cf) <: Vector{Float64} - f = cf; + if typeof(cf) <: Ptr{Nothing} + f = PETSc.unsafe_localarray(PETSc.scalartype(petsclib),cf) else - f = PETSc.unsafe_localarray(Float64,cf) + f = cf; end - n = length(x); - print("Length of x is ", n, "\n"); - print("f is ", f,"\n"); - xp = LinRange(0.0,1.0, n); - F .= 6.0.*xp .+ (xp .+1.e-12).^6.0; # define source term function + n = length(x); + xp = LinRange(0.0,1.0, n); + F = 6.0.*xp .+ (xp .+1.e-12).^6.0; # define source term function dx = 1.0/(n-1.0); - f[1] = x[1] - 0.0; + f[1] = x[1] - 0.0; for i=2:n-1 f[i] = (x[i-1] - 2.0*x[i] + x[i+1])/dx^2 + x[i]*x[i] - F[i] end - f[n] = x[n] - 1.0; + f[n] = x[n] - 1.0; Base.finalize(x) Base.finalize(f) end -``` + +""" Computes the jacobian, given solution vector x -``` -function FormJacobian!(x, args...) +""" +function FormJacobian!(cx, args...) 
+ + if typeof(cx) <: Ptr{Nothing} + x = PETSc.unsafe_localarray(PETSc.scalartype(petsclib),cx) + else + x = cx; + end - #x = PETSc.unsafe_localarray(Float64,cx) J = args[1]; # preconditioner = args[2], in case we want it to be different from J n = length(x); dx = 1.0/(n-1.0); - # interior points + # interior points (hand-coded jacobian) for i=2:n-1 J[i,i-1] = 1.0/dx^2; J[i,i ] = -2.0/dx^2 + 2.0*x[i]; @@ -91,7 +98,9 @@ function FormJacobian!(x, args...) if typeof(J) <: PETSc.AbstractMat PETSc.assemble(J); # finalize assembly + end + Base.finalize(x) end @@ -116,12 +125,12 @@ PJ = PETSc.MatSeqAIJ(Jsp); # transfer to # Setup snes x_s = PETSc.VecSeq(x); # solution vector -res = PETSc.VecSeq(zeros(size(x))); # residual vector +res = PETSc.VecSeq(F); # residual vector -S = PETSc.SNES{Float64}(MPI.COMM_SELF; +S = PETSc.SNES{Float64}(PETSc.petsclibs[1],MPI.COMM_WORLD; snes_rtol=1e-12, - snes_monitor=nothing, - snes_converged_reason=nothing); + snes_monitor=false, + snes_converged_reason=false); PETSc.setfunction!(S, FormResidual!, res) PETSc.setjacobian!(S, FormJacobian!, PJ, PJ) @@ -131,9 +140,10 @@ PETSc.solve!(x_s, S); # Extract & plot solution x_sol = x_s.array; # convert solution to julia format FormResidual!(res.array,x_sol) # just for checking, compute residual + @show norm(res.array) +PETSc.finalize(petsclib) +# plot solution in REPL lineplot(LinRange(0,1,n),x_sol,xlabel="width",ylabel="solution") - -#PETSc.finalize() diff --git a/examples/SNES_ex2b.jl b/examples/SNES_ex2b.jl index 94c09af6..9522ad2b 100644 --- a/examples/SNES_ex2b.jl +++ b/examples/SNES_ex2b.jl @@ -1,38 +1,46 @@ # This implements src/snes/examples/tutorials/ex2.c from PETSc using the PETSc.jl package, using SNES # -# This is the same as SNES_ex2b.j, except that we show how automatic differentiation can be used to -# compute the jacobian. +# This is the same as SNES_ex2.jl, except that we show how automatic differentiation can be used to +# compute the jacobian. That implies that the user does not have to provide a hand-coded jacobian. +# Note that the way we compute the jacobian here is not very efficient as we do not use the sparsity structure of the +# matrix; see the other examples for faster implementions. # # Newton method to solve u'' + u^{2} = f, sequentially. using PETSc, MPI, LinearAlgebra, SparseArrays, UnicodePlots, ForwardDiff +using Test + if ~MPI.Initialized() MPI.Init() end -PETSc.initialize() -``` +petsclib = PETSc.petsclibs[1] +PETSc.initialize(petsclib) + +""" Computes initial guess -``` +""" function FormInitialGuess!(x) for i=1:length(x) x[i] = 0.50; end end -``` +""" Computes the residual f, given solution vector x -``` +""" function FormResidual!(cf,cx, args...) if typeof(cx) <: Ptr{Nothing} - x = PETSc.unsafe_localarray(Float64,cx) + # When this routine is called from PETSc, cx is a pointer to a global vector + # That's why we have to transfer it first to + x = PETSc.unsafe_localarray(PETSc.scalartype(petsclib),cx) else x = cx; end if typeof(cf) <: Ptr{Nothing} - f = PETSc.unsafe_localarray(Float64,cf) + f = PETSc.unsafe_localarray(PETSc.scalartype(petsclib),cf) else f = cf; end @@ -51,24 +59,24 @@ function FormResidual!(cf,cx, args...) 
end -``` +""" Wrapper which makes it easier to compute the jacobian using automatic differntiation -``` +""" function ForwardDiff_res(x) - f = zero(x) # vector of zeros, of same type as e + f = zero(x) # vector of zeros, of same type as x FormResidual!(f,x); return f; end -``` +""" Computes the jacobian, given solution vector x -``` +""" function FormJacobian!(cx, args...) if typeof(cx) <: Ptr{Nothing} - x = PETSc.unsafe_localarray(Float64,cx) + x = PETSc.unsafe_localarray(PETSc.scalartype(petsclib),cx) else x = cx; end @@ -102,7 +110,7 @@ PJ = PETSc.MatSeqAIJ(Jsp); # transfer to PETSc (ini x_s = PETSc.VecSeq(x); # solution vector res = PETSc.VecSeq(zeros(size(x))); # residual vector -S = PETSc.SNES{Float64}(MPI.COMM_SELF; +S = PETSc.SNES{Float64}(PETSc.petsclibs[1],MPI.COMM_SELF; snes_rtol=1e-12, snes_monitor=nothing, snes_converged_reason=nothing); @@ -115,8 +123,10 @@ PETSc.solve!(x_s, S); # Extract & plot solution x_sol = x_s.array; # convert solution to julia format FormResidual!(res.array,x_sol) # just for checking, compute residual + @show norm(res.array) -lineplot(LinRange(0,1,n),x_sol,xlabel="width",ylabel="solution") +PETSc.finalize(petsclib) -#PETSc.finalize() +# plot solution in REPL +lineplot(LinRange(0,1,n),x_sol,xlabel="width",ylabel="solution") diff --git a/examples/laplacian.jl b/examples/laplacian.jl index b1c6504a..2c8cc889 100644 --- a/examples/laplacian.jl +++ b/examples/laplacian.jl @@ -18,8 +18,8 @@ function laplace(T, nx::Integer, ny::Integer=nx, lx::Integer=1, ly::Integer=lx) return A end +nx = 20 for T in PETSc.scalar_types - nx = 20 S = laplace(T, nx) m, n = size(S) @@ -27,8 +27,8 @@ for T in PETSc.scalar_types ksp = PETSc.KSP( M; - ksp_monitor_true_residual = nothing, - ksp_view = nothing, + ksp_monitor_true_residual = false, + ksp_view = false, ksp_type = "cg", ksp_rtol = 1e-8, pc_type = "gamg", diff --git a/src/dm.jl b/src/dm.jl index 26cc2a99..a81e569c 100644 --- a/src/dm.jl +++ b/src/dm.jl @@ -242,47 +242,150 @@ Updates `global_vec` from `local_vec` with insert `mode` # External Links $(_doc_external("DM/DMLocalToGlobal")) """ -update!(::DMGlobalVec, ::DMLocalVec, ::InsertMode) +function update!( global_vec::DMGlobalVec, local_vec::DMLocalVec, mode::InsertMode) end @for_petsc function update!( global_vec::DMGlobalVec{$PetscLib}, local_vec::DMLocalVec{$PetscLib}, - mode::InsertMode, + mode::InsertMode ) @assert local_vec.dm === global_vec.dm - @chk ccall( - (:DMLocalToGlobal, $petsc_library), - PetscErrorCode, - (CDM, CVec, InsertMode, CVec), - local_vec.dm, - local_vec, - mode, - global_vec, + + update_local2global!( global_vec.ptr, local_vec.ptr, mode, local_vec.dm); + + return nothing +end + +""" + update!( + global_vec::DMGlobalVec, + local_ptr::CVec, + mode::InsertMode, ) +Updates `global_vec` from a pointer to a local vector `local_ptr` with insert `mode` + +# External Links +$(_doc_external("DM/DMLocalToGlobal")) +""" +function update!( global_vec::DMGlobalVec, local_ptr::CVec, mode::InsertMode) end + +@for_petsc function update!( + global_vec::DMGlobalVec{$PetscLib}, + local_ptr::CVec, + mode::InsertMode +) + update_local2global!( global_vec.ptr, local_ptr, mode, global_vec.dm); + return nothing end """ update!( + global_ptr::CVec, local_vec::DMLocalVec, - global_vec::DMGlobalVec, mode::InsertMode, ) -Updates `local_vec` from `global_vec` with insert `mode` +Updates pointer to global vec `global_ptr` from `local_vec` with insert `mode` + +# External Links +$(_doc_external("DM/DMLocalToGlobal")) +""" +function update!( global_vec::DMGlobalVec, 
local_ptr::CVec, mode::InsertMode) end + +@for_petsc function update!( + global_ptr::CVec, + local_vec::DMLocalVec{$PetscLib}, + mode::InsertMode +) + update_local2global!( global_ptr, local_vec.ptr, mode, local_vec.dm); + + return nothing +end + +""" + update!( + local_vec::DMLocalVec, + global_ptr::CVec, + mode::InsertMode, + ) + +Updates `local_vec` from pointer to global vec `global_ptr` with insert `mode` # External Links $(_doc_external("DM/DMGlobalToLocal")) """ -update!(::DMLocalVec, ::DMGlobalVec, ::InsertMode) +function update!(local_vec::DMLocalVec,global_ptr::CVec, mode::InsertMode) end @for_petsc function update!( local_vec::DMLocalVec{$PetscLib}, + global_ptr::CVec, + mode::InsertMode, +) + + update_global2local!( local_vec.ptr, global_ptr, mode, local_vec.dm); + return nothing +end + +""" + update!( + local_ptr::CVec, + global_vec::DMGlobalVec + mode::InsertMode, + ) + +Updates pointer to local vec `local_ptr` from `global_vec` with insert `mode` + +# External Links +$(_doc_external("DM/DMGlobalToLocal")) +""" +function update!(local_ptr::CVec, global_vec::DMGlobalVec, mode::InsertMode) end + +@for_petsc function update!( + local_ptr::CVec, global_vec::DMGlobalVec{$PetscLib}, mode::InsertMode, ) + + update_global2local!( local_ptr, global_vec.ptr, mode, global_vec.dm); + return nothing +end + + +""" + update!( + local_vec::DMLocalVec, + global_vec::DMGlobalVec, + mode::InsertMode + ) + +Updates `local_vec` from a pointer to a global vec `global_ptr` with insert `mode` + +# External Links +$(_doc_external("DM/DMGlobalToLocal")) +""" +function update!(local_vec::DMLocalVec, global_vec::DMGlobalVec, mode::InsertMode) end + +@for_petsc function update!( + local_vec::DMLocalVec{$PetscLib}, + global_vec::DMGlobalVec{$PetscLib}, + mode::InsertMode +) @assert local_vec.dm === global_vec.dm + + update_global2local!( local_vec.ptr, global_vec.ptr, mode, local_vec.dm); + + return nothing +end + + +#= +@for_petsc function update!( + local_ptr::CVec, + global_vec::DMGlobalVec{$PetscLib}, + mode::InsertMode, +) @chk ccall( (:DMGlobalToLocal, $petsc_library), PetscErrorCode, @@ -290,7 +393,76 @@ update!(::DMLocalVec, ::DMGlobalVec, ::InsertMode) global_vec.dm, global_vec, mode, - local_vec, + local_ptr, + ) + + return nothing +end +=# + +""" + update_local2global!( + global_ptr::CVec, + local_ptr::CVec, + mode::InsertMode, + dm::AbstractDM + ) + +Updates pointer of `global_vec` from pointer of `local_vec` with insert `mode`. +Both vectors should belong to the same `dm` + +This is a low-level routine that is typically called by `update!` +""" +function update_local2global! end + +@for_petsc function update_local2global!( + global_ptr::CVec, + local_ptr::CVec, + mode::InsertMode, + dm::AbstractDM{$PetscLib} +) + @chk ccall( + (:DMLocalToGlobal, $petsc_library), + PetscErrorCode, + (CDM, CVec, InsertMode, CVec), + dm, + local_ptr, + mode, + global_ptr, + ) + + return nothing +end + +""" + update_global2local!( + local_ptr::CVec, + global_ptr::CVec, + mode::InsertMode, + dm::AbstractDM + ) + +Updates pointer to `local` vector from pointer to `global` vector with insert `mode`, assuming that both belong to the same `dm` + +This is a low-level routine that is typically called by `update!` + +""" +function update_global2local! 
end + +@for_petsc function update_global2local!( + local_ptr::CVec, + global_ptr::CVec, + mode::InsertMode, + dm::AbstractDM{$PetscLib} +) + @chk ccall( + (:DMGlobalToLocal, $petsc_library), + PetscErrorCode, + (CDM, CVec, InsertMode, CVec), + dm, + global_ptr, + mode, + local_ptr, ) return nothing diff --git a/src/dmstag.jl b/src/dmstag.jl index 7caa9c4a..6904a8a9 100644 --- a/src/dmstag.jl +++ b/src/dmstag.jl @@ -28,17 +28,13 @@ end const DMStagStencil = DMSTAGSTENCIL - # allows us to pass XXMat objects directly into CMat ccall signatures Base.cconvert(::Type{CDMStag}, obj::DMStag) = obj.ptr + # allows us to pass XXMat objects directly into Ptr{CMat} ccall signatures Base.unsafe_convert(::Type{Ptr{CDMStag}}, obj::DMStag) = convert(Ptr{CDMStag}, pointer_from_objref(obj)) - -#Base.eltype(::DMStag{T,PetscInt}) where {T,PetscInt} = T,PetscInt - - """ dm = DMStagCreate1d(::PetscLib, comm::MPI.Comm, @@ -343,7 +339,8 @@ Creates a compatible DMStag with different dof/stratum dofElement - [=0] number of degrees of freedom per element/3-cell kwargs... - [Optional] keyword arguments (see PETSc webpage), specifiable as stag_grid_x=100, etc. -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DMSTAG/DMStagCreateCompatibleDMStag.html) +# External Links +$(_doc_external("DMSTAG/DMStagCreateCompatibleDMStag")) """ function DMStagCreateCompatibleDMStag end @@ -404,7 +401,8 @@ Get number of DOF associated with each stratum of the grid. dof2 - the number of points per 2-cell (element in 2D, face in 3D) dof3 - the number of points per 3-cell (element in 3D) -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DMSTAG/DMStagGetDOF.html) +# External Links +$(_doc_external("DMSTAG/DMStagGetDOF")) """ function DMStagGetDOF end @@ -451,7 +449,8 @@ Gets the global size of the DMStag object dm - the DMStag object M,N,P - size in x,y,z -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DMSTAG/DMStagGetGlobalSizes.html) +# External Links +$(_doc_external("DMSTAG/DMStagGetGlobalSizes")) """ function DMStagGetGlobalSizes end @@ -496,7 +495,8 @@ Gets the local size of the DMStag object dm - the DMStag object M,N,P - size in x,y,z -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DMSTAG/DMStagGetLocalSizes.html) +# External Links +$(_doc_external("DMSTAG/DMStagGetLocalSizes")) """ function DMStagGetLocalSizes end @@ -611,7 +611,8 @@ Set DMStag coordinates to be a uniform grid, storing all values. dm - the DMStag object xmin,xmax,ymin,ymax,zmin,zmax - maximum and minimum global coordinate values -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DMSTAG/DMStagSetUniformCoordinatesExplicit.html) +# External Links +$(_doc_external("DMSTAG/DMStagSetUniformCoordinatesExplicit")) """ function DMStagSetUniformCoordinatesExplicit end @@ -638,76 +639,6 @@ function DMStagSetUniformCoordinatesExplicit end return nothing end -#= -# replaced with createglobalvector in dm.jl -""" - vec = DMCreateGlobalVector( - dm::DMStag; - write_val=true, read_val=true - ) - -Creates a global vector from a DM object. 
- -NOTE: for now this is initialized sequentially; MPI should be added - - dm - the DM object - vec - the global vector - -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DM/DMCreateGlobalVector.html) -""" -function DMCreateGlobalVector end - -@for_petsc function DMCreateGlobalVector( - dm::DMStag{$PetscLib}; - write_val=true, read_val=true - ) - - v = VecSeq(C_NULL, [0.0]) # empty vector - - @chk ccall((:DMCreateGlobalVector, $petsc_library), PetscErrorCode, (CDMStag, Ptr{CVec}), dm, v) - - # Link a julia array to the values from the new vector - # If we modify values here, it will automatically be changed in the PetcVec as well - v.array = unsafe_localarray($PetscScalar, v.ptr; write=write_val, read=read_val) - - return v -end -=# - -#= -# replaced with createlocalvector? -""" - vec = DMCreateLocalVector(dm::DMStag; - write_val=true, read_val=true - ) - -Creates a local vector from a DM object. - -NOTE: for now this is initialized sequentially; MPI should be added - - dm - the DM object - vec - the local vector - -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DM/DMCreateLocalVector.html) -""" -function DMCreateLocalVector end - -@for_petsc function DMCreateLocalVector( - dm::DMStag{$PetscLib}; - write_val=true, read_val=true - ) - - v = VecSeq(C_NULL, [0.0]) # empty vector - - @chk ccall((:DMCreateLocalVector, $petsc_library), PetscErrorCode, (CDMStag, Ptr{CVec}), dm, v) - - # Link a julia array to the values from the new vector - # If we modify values here, it will automatically be changed in the PetcVec as well - v.array = unsafe_localarray($PetscScalar, v.ptr; write=write_val, read=read_val) - - return v -end -=# """ Array = DMStagVecGetArray(dm::DMStag, v::AbstractVec) @@ -867,7 +798,8 @@ Get slot for use with local product coordinate arrays. loc - the grid location slot - the index to use in local arrays -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DMSTAG/DMStagGetProductCoordinateLocationSlot.html) +# External Links +$(_doc_external("DMSTAG/DMStagGetProductCoordinateLocationSlot")) """ function DMStagGetProductCoordinateLocationSlot end @@ -900,7 +832,8 @@ Get number of entries per element in the local representation. dm - the DMStag objects entriesPerElement - number of entries associated with each element in the local representation -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-master/docs/manualpages/DMSTAG/DMStagGetEntriesPerElement.html) +# External Links +$(_doc_external("DMSTAG/DMStagGetEntriesPerElement")) """ function DMStagGetEntriesPerElement end @@ -926,7 +859,8 @@ Get elementwise stencil width. dm - the DMStag objects stencilWidth - stencil/halo/ghost width in elements -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DMSTAG/DMStagGetStencilWidth.html) +# External Links +$(_doc_external("DMSTAG/DMStagGetStencilWidth")) """ function DMStagGetStencilWidth end @@ -953,7 +887,8 @@ Get index to use in accessing raw local arrays. 
c - component ( the degree of freedom) slot - index to use -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DMSTAG/DMStagGetLocationSlot.html) +# External Links +$(_doc_external("DMSTAG/DMStagGetLocationSlot")) """ function DMStagGetLocationSlot end @@ -983,9 +918,10 @@ Destroys a DMSTAG object and releases the memory dm - the DM object to destroy -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DM/DMDestroy.html) +# External Links +$(_doc_external("DM/DMDestroy")) """ -function destroy end +function destroy(dm::DMStag) end @for_petsc function destroy(dm::DMStag{$PetscLib}) finalized($petsclib) || @@ -993,185 +929,39 @@ function destroy end return nothing end -#= -# MOVED TO dm.jl -""" - type = gettype(dm::DMStag) - -Gets the DM type name (as a string) from the DM. - - dm - The DM - type- The DM type name - -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DM/DMGetType.html) -""" -function gettype end - -@for_petsc function gettype(dm::DMStag{$PetscLib}) - t_r = Ref{CDMStagType}() - @chk ccall((:DMGetType, $petsc_library), PetscErrorCode, (CDMStag, Ptr{CDMStagType}), dm, t_r) - return unsafe_string(t_r[]) -end - - -# REPLACED WITH ROUTINE IN DM.JL -""" - view(dm::DMStag, viewer::Viewer=ViewerStdout(dm.comm)) - -Views a DMSTAG object. - - dm - the DM object to view - viewer - the viewer - -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DM/DMView.html) -""" -#function view end - -#@for_petsc function view(dm::DMStag{$PetscLib}, viewer::AbstractViewer{$PetscLib}=ViewerStdout($petsclib, MPI.MPI_COMM_SELF)) -# @chk ccall((:DMView, $petsc_library), PetscErrorCode, -# (CDMStag, CPetscViewer), -# dm, viewer); -# return nothing -#end -=# - -#= -# REPLACED WITH PETSc.getcorners -""" - - x,m,nExtrax = DMStagGetCorners(dm:DMStag) in 1D - x[],m[],nExtrax[] = DMStagGetCorners(dm:DMStag) in 2D or 3D - -Returns the global element indices of the local region (excluding ghost points). - - dm - the DMStag object - x,y,z - starting element indices in each direction - m,n,p - element widths in each direction - nExtrax,nExtray,nExtraz - number of extra partial elements in each direction. 
- -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DMSTAG/DMStagGetCorners.html) -""" -function DMStagGetCorners end - -@for_petsc function DMStagGetCorners(dm::DMStag{$PetscLib}) - - x = Ref{$PetscInt}() - y = Ref{$PetscInt}() - z = Ref{$PetscInt}() - m = Ref{$PetscInt}() - n = Ref{$PetscInt}() - p = Ref{$PetscInt}() - nExtrax = Ref{$PetscInt}() - nExtray = Ref{$PetscInt}() - nExtraz = Ref{$PetscInt}() - - @chk ccall((:DMStagGetCorners, $petsc_library), PetscErrorCode, - ( - CDMStag, - Ptr{$PetscInt}, Ptr{$PetscInt}, Ptr{$PetscInt}, - Ptr{$PetscInt}, Ptr{$PetscInt}, Ptr{$PetscInt}, - Ptr{$PetscInt}, Ptr{$PetscInt}, Ptr{$PetscInt} - ), - dm, - x,y,z, - m,n,p, - nExtrax,nExtray,nExtraz - ) - - dim = getdimension(dm); - - if dim==1 - X = (x[],) - M = (m[],) - NEXTRA = (nExtrax[],) - return X[1], M[1], NEXTRA[1] - elseif dim==2 - return (x[], y[]), (m[],n[]), (nExtrax[],nExtray[]) - elseif dim==3 - return (x[], y[], z[]), (m[],n[],p[]), (nExtrax[],nExtray[],nExtraz[]) - end -end -=# - -#@for_petsc PETSc.getcorners(dm::DMStag{$PetscLib}) = DMStagGetCorners(dm::DMStag{$PetscLib}) - -#= -# REPLACED WITH PETSc.getghostcorners -""" - x,m = DMStagGetGhostCorners(dm:DMStag) in 1D - x[],m[] = DMStagGetGhostCorners(dm:DMStag) in 2D or 3D - -Return global element indices of the local region (including ghost points). - - dm - the DMStag object - x[] - starting element indices in each direction - m[] - element widths in each direction - -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DMSTAG/DMStagGetGhostCorners.html) """ -function DMStagGetGhostCorners end - -@for_petsc function DMStagGetGhostCorners(dm::DMStag) - - x = Ref{$PetscInt}() - y = Ref{$PetscInt}() - z = Ref{$PetscInt}() - m = Ref{$PetscInt}() - n = Ref{$PetscInt}() - p = Ref{$PetscInt}() - - @chk ccall((:DMStagGetGhostCorners, $petsc_library), PetscErrorCode, - ( - CDMStag, - Ptr{$PetscInt}, Ptr{$PetscInt}, Ptr{$PetscInt}, - Ptr{$PetscInt}, Ptr{$PetscInt}, Ptr{$PetscInt} - ), - dm, - x,y,z, - m,n,p - ) - - dim = getdimension(dm); - - if dim==1 - X = (x[],) - M = (m[],) - return X[1], M[1] - elseif dim==2 - return (x[], y[]), (m[],n[]) - elseif dim==3 - return (x[], y[], z[]), (m[],n[],p[]) - end -end -=# - -""" - Indices = DMStagGetCentralNodes(dm::DMStag) + Indices = DMStagGetIndices(dm::DMStag) -Return indices of start and end of the central nodes of a local array built from the input `dm` (excluding ghost nodes). +Return indices of start and end of the central/vertex nodes of a local array built from the input `dm`. 
+This takes ghost points into account and helps to translate between PETSc's ghosted (negative) indexing and Julia's 1-based local arrays. dm - the DMStag object Indices - indices of lower and upper range of center and vertex nodes """ -function DMStagGetCentralNodes end +function DMStagGetIndices end -@for_petsc function DMStagGetCentralNodes(dm::DMStag) +@for_petsc function DMStagGetIndices(dm::DMStag) # In Julia, indices in arrays start @ 1, whereas they can go negative in C - - #g_start, g_N = getghostcorners(dm); # MODIFY gc = getghostcorners(dm); - - #start,N, nExtra = getcorners(dm); # MODIFY c = getcorners(dm); - # Note that we add the +1 for julia/petsc consistency - center = ( x= (c.lower[1]+1):(c.upper[1]+1), - y= (c.lower[2]+1):(c.upper[2]+1), - z= (c.lower[3]+1):(c.upper[3]+1) ) + # If we have ghosted boundaries, we need to shift the start/end points, as ghosted + # boundaries are treated in PETSc with negative numbers, whereas in Julia everything is 1-based + + # NOTE: we have not yet tested this in parallel + Diff = c.lower - gc.lower; + Start = c.lower + Diff; + End = Start + c.size - ones(Int64,3); - vertex = ( x=c.lower[1]+1:c.upper[1]+2 , - y=c.lower[2]+1:c.upper[2]+2 , - z=c.lower[3]+1:c.upper[3]+2 ) + # Note that we add the shift for julia/petsc consistency + shift = 0; + center = ( x= Start[1]:End[1], + y= Start[2]:End[2], + z= Start[3]:End[3] ) + + vertex = ( x= Start[1]:End[1]+1 , + y= Start[2]:End[2]+1 , + z= Start[3]:End[3]+1 ) return (center=center, vertex=vertex) @@ -1187,7 +977,8 @@ Get boundary types. dm - the DMStag object Bx,By,Bz - boundary types -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DMSTAG/DMStagGetBoundaryTypes.html) +# External Links +$(_doc_external("DMSTAG/DMStagGetBoundaryTypes")) """ function DMStagGetBoundaryTypes end @@ -1227,7 +1018,8 @@ Get number of ranks in each direction in the global grid decomposition. dm - the DMStag object nRanks0,nRanks1,nRanks2 - number of ranks in each direction in the grid decomposition -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DMSTAG/DMStagGetNumRanks.html) +# External Links +$(_doc_external("DMSTAG/DMStagGetNumRanks")) """ function DMStagGetNumRanks end @@ -1276,7 +1068,8 @@ This puts a value inside a global vector using DMStagStencil. val - the value to be set insertMode - INSERT_VALUES or ADD_VALUES -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DMSTAG/DMStagVecSetValuesStencil.html) +# External Links +$(_doc_external("DMSTAG/DMStagVecSetValuesStencil")) """ function DMStagVecSetValuesStencil end @@ -1328,7 +1121,8 @@ This puts values inside a global vector using DMStagStencil val - the value to be set insertMode - INSERT_VALUES or ADD_VALUES -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DMSTAG/DMStagVecSetValuesStencil.html) +# External Links +$(_doc_external("DMSTAG/DMStagVecSetValuesStencil")) """ function DMStagVecSetValuesStencil end @@ -1383,7 +1177,8 @@ Get vector values using grid indexing pos - locations to obtain values from (as an array of DMStagStencil values) val - value at the point -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DMSTAG/DMStagVecGetValuesStencil.html) +# External Links +$(_doc_external("DMSTAG/DMStagVecGetValuesStencil")) """ function DMStagVecGetValuesStencil end @@ -1430,7 +1225,8 @@ Get vector values using grid indexing.
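A minimal usage sketch for the renamed `DMStagGetIndices` (not part of the patch; `dm` and `x_local` are assumed to be an existing 2D DMStag with one DOF per element and a local vector created from it, as in the tests below). The returned `center`/`vertex` ranges are 1-based and index directly into arrays obtained from `DMStagVecGetArray`:

    ind = PETSc.DMStagGetIndices(dm)              # named tuple with `center` and `vertex` ranges
    A   = PETSc.DMStagVecGetArray(dm, x_local)    # ghosted local array
    T   = PETSc.DMStagGetGhostArrayLocationSlot(dm, A, PETSc.DMSTAG_ELEMENT, 0)
    for j in ind.center.y, i in ind.center.x      # loop over interior element centers only
        T[i, j] = 1.0                             # ghost entries are left untouched
    end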
pos - locations to obtain values from (as an array of DMStagStencil values) val - value at the point -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DMSTAG/DMStagVecGetValuesStencil.html) +# External Links +$(_doc_external("DMSTAG/DMStagVecGetValuesStencil")) """ function DMStagVecGetValuesStencil end @@ -1484,7 +1280,8 @@ This reads a single value from a matrix DMStagStencil posCol - the location of the row of the set value, given by a DMStagStencil struct (as a vector) val - the value -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DMSTAG/DMStagMatGetValuesStencil.html) +# External Links +$(_doc_external("DMSTAG/DMStagMatGetValuesStencil")) """ function DMStagMatGetValuesStencil end @@ -1538,7 +1335,9 @@ This reads a single value from a matrix DMStagStencil. posCol - the location of the row of the set value, given by a DMStagStencil struct (as a vector) val - the value -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DMSTAG/DMStagMatGetValuesStencil.html) + +# External Links +$(_doc_external("DMSTAG/DMStagMatGetValuesStencil")) """ function DMStagMatGetValuesStencil end @@ -1603,16 +1402,14 @@ function LocalInGlobalIndices(dm::DMStag) ind_g = createglobalvector(dm) v_ind_l = createlocalvector(dm) - # Set indices in local vector - for i=1:length(v_ind_l) - v_ind_l[i] = i + for i=1:length(ind_g) + ind_g[i] = i end - @show v_ind_l - update!(ind_g, v_ind_l, INSERT_VALUES); # update global vector - - #@show v_ind_l ind_g + update!(v_ind_l, ind_g, INSERT_VALUES); # update local vector - return Int64.(ind_g) + ix = findall( real(v_ind_l) .> 0 ); #adding real here such that it works with Complex PetscLib as well + + return ix end """ @@ -1637,7 +1434,8 @@ This puts values inside a matrix using DMStagStencil position. val - the value to be set insertMode - INSERT_VALUES or ADD_VALUES -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DMSTAG/DMStagMatSetValuesStencil.html) +# External Links +$(_doc_external("DMSTAG/DMStagMatSetValuesStencil")) """ function DMStagMatSetValuesStencil end @@ -1697,7 +1495,8 @@ This puts values inside a matrix using DMStagStencil position val - the value to be set insertMode - INSERT_VALUES or ADD_VALUES -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DMSTAG/DMStagMatSetValuesStencil.html) +# External Links +$(_doc_external("DMSTAG/DMStagMatSetValuesStencil")) """ function DMStagMatSetValuesStencil end @@ -1749,29 +1548,7 @@ function DMStagMatSetValuesStencil end return nothing end -#= -# REPLACED WITH PETSc.getdimension -""" - dim = DMGetDimension(dm::DMStag) - -Return the topological dimension of the DM. - - dm - The DM - dim - dimensions - -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DM/DMGetDimension.html) -""" -function DMGetDimension end - -@for_petsc function DMGetDimension(dm::DMStag) - dim = Ref{$PetscInt}() - - @chk ccall((:DMGetDimension, $petsc_library), PetscErrorCode, (CDMStag,Ptr{$PetscInt}), dm, dim ) - - return dim[] -end -=# - +# NOTE: We should likely rewrite this to make it consistent with update!, which has a slighlt different calling sequence """ DMLocalToGlobal( dm::DMStag, @@ -1786,8 +1563,10 @@ Updates global vectors from local vectors. l - the local vector mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that base point. 
g - the global vector + +# External Links +$(_doc_external("DM/DMLocalToGlobal")) -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DM/DMLocalToGlobal.html) """ function DMLocalToGlobal end @@ -1797,7 +1576,6 @@ function DMLocalToGlobal end g::AbstractVec{$PetscScalar} ) - #DMLocalToGlobal(dm,l.ptr, mode,g.ptr) update!(l,g,mode); return nothing @@ -1809,45 +1587,17 @@ end g::CVec ) - #DMLocalToGlobal(dm,l.ptr, mode,g) update!(l,g,mode); return nothing end -#= -# replaced with update! -@for_petsc function DMLocalToGlobal( - dm::DMStag{$PetscLib}, - l::CVec, - mode::InsertMode, - g::CVec - ) - - @chk ccall((:DMLocalToGlobal, $petsc_library), PetscErrorCode, - ( - CDMStag, - CVec, - InsertMode, - CVec - ), - dm, - l, - mode, - g) - - - return nothing -end -=# - @for_petsc function DMGlobalToLocal( g::AbstractVec{$PetscScalar}, mode::InsertMode, l::AbstractVec{$PetscScalar} ) - #DMGlobalToLocal(dm,g.ptr, mode::InsertMode,l.ptr) update!(g,l,mode) return nothing @@ -1859,88 +1609,11 @@ end mode::InsertMode, l::AbstractVec{$PetscScalar} ) - - # DMGlobalToLocal(dm,g, mode::InsertMode,l.ptr) update!(g,l,mode) return nothing end -#= -# replaced with update! -""" - DMGlobalToLocal( - dm::DMStag, - g::CVec, - mode::InsertMode, - l::CVec - ) - -Update local vectors from global vector. - - dm - the DM object - g - the global vector - mode - INSERT_VALUES or ADD_VALUES - l - the local vector - -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DM/DMGlobalToLocal.html) -""" -function DMGlobalToLocal end - -@for_petsc function DMGlobalToLocal( - dm::DMStag{$PetscLib}, - g::CVec, - mode::InsertMode, - l::CVec - ) - - @chk ccall((:DMGlobalToLocal, $petsc_library), PetscErrorCode, - ( - CDMStag, - CVec, - InsertMode, - CVec - ), - dm, - g, - mode, - l - ) - - return nothing -end -=# - -#= -# REPLACED WITH PETSc.creatematrix? -""" - mat = DMCreateMatrix(dm::DMStag) - -Generates a matrix from a DMStag object. The type is a MatSeqAIJ is we are on 1 core. - - dm - the DMStag object - mat - the matrix of type MatSeqAIJ (on 1 core) - -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DM/DMCreateMatrix.html) -""" -function DMCreateMatrix end - -@for_petsc function DMCreateMatrix(dm::DMStag) - # Note: the matrix cannot be viewed yet, as it remains unassembled - # ideally, we should modify the viewer to take care of this case - - if dm.comm==MPI.COMM_SELF - mat = MatSeqAIJ{$PetscScalar}(C_NULL) - elseif dm.comm==MPI.COMM_WORLD - error("MatMPIAIJ still to be implemented") - end - - @chk ccall((:DMCreateMatrix, $petsc_library), PetscErrorCode, - (CDMStag, Ptr{CMat}), dm, mat) - - return mat -end -=# """ stencilType = DMStagGetStencilType(dm::DMStag) @@ -1950,7 +1623,8 @@ Get elementwise ghost/halo stencil type. 
dm - the DMStag object stencilType - the elementwise ghost stencil type: DMSTAG_STENCIL_BOX, DMSTAG_STENCIL_STAR, or DMSTAG_STENCIL_NONE -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DMSTAG/DMStagGetStencilType.html) +# External Links +$(_doc_external("DMSTAG/DMStagGetStencilType")) """ function DMStagGetStencilType end @@ -1977,7 +1651,8 @@ Returns boolean value to indicate whether this rank is first in each direction i dm - the DMStag object fr_X,fr_Y,fr_Z - whether this rank is first in each direction -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DMSTAG/DMStagGetIsFirstRank.html) +# External Links +$(_doc_external("DMSTAG/DMStagGetIsFirstRank")) """ function DMStagGetIsFirstRank end @@ -2008,7 +1683,8 @@ Returns boolean value to indicate whether this rank is last in each direction in dm - the DMStag object fr_X,fr_Y,fr_Z - whether this rank is last in each direction -From [PETSc Manual])(https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DMSTAG/DMStagGetIsLastRank.html) +# External Links +$(_doc_external("DMSTAG/DMStagGetIsLastRank")) """ function DMStagGetIsLastRank end @@ -2029,67 +1705,6 @@ function DMStagGetIsLastRank end return fr_X[]== PETSC_TRUE, fr_Y[]== PETSC_TRUE, fr_Z[]== PETSC_TRUE end -#= -# REPLACED WITH getcoordinateDM -""" - dmnew = DMGetCoordinateDM(dm::DMStag; kwargs...) - -Gets the DM that prescribes coordinate layout and scatters between global and local coordinates. - - dm - the DMStag object - kwargs... - [Optional] keyword arguments (see PETSc webpage), specifiable as stag_grid_x=100, etc. - dmnew - Coordinate DM - -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DM/DMGetCoordinateDM.html) -""" -function DMGetCoordinateDM end - -@for_petsc function DMGetCoordinateDM( - dm::DMStag{$PetscLib}; kwargs...) - - opts = Options($PetscLib, kwargs...) - - dmnew = DMStag{$PetscLib}(C_NULL, opts) # retrieve options - - @chk ccall((:DMGetCoordinateDM, $petsc_library), PetscErrorCode, - (CDMStag, Ptr{CDMStag}), - dm, dmnew) - - return dmnew -end -=# - -#= -# replaced with getcoordinateslocal -""" - v = DMGetCoordinatesLocal(dm::DMStag; write_val=true, read_val=true) - -Gets a local vector with the coordinates associated with the DM. 
- - dm - the DMStag object - v - coordinate local vector - -From [PETSc Manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DM/DMGetCoordinatesLocal.html) -""" -function DMGetCoordinatesLocal end - -@for_petsc function DMGetCoordinatesLocal( - ::$UnionPetscLib, - dm::DMStag; write_val=true, read_val=true) - - # Should probably be a parallel vector - v = VecSeq(C_NULL, [0.0]) # empty vector - - @chk ccall((:DMGetCoordinatesLocal, $petsc_library), PetscErrorCode, (CDMStag, Ptr{CVec}), dm, v) - - # Link a julia array to the values from the new vector - # If we modify values here, it will automatically be changed in the PetcVec as well - v.array = unsafe_localarray($PetscScalar, v.ptr; write=write_val, read=read_val) - - return v -end -=# - """ setuniformcoordinates!( dm::DMStag, @@ -2155,7 +1770,6 @@ $(_doc_external("DMSTAG/DMStagGetCorners")) function getcorners(dm::DMStag) end @for_petsc function getcorners(dm::DMStag{$PetscLib}) - # info = DMDALocalInfo{$PetscInt}() corners = [$PetscInt(0), $PetscInt(0), $PetscInt(0)] local_size = [$PetscInt(0), $PetscInt(0), $PetscInt(0)] nExtra = [$PetscInt(0), $PetscInt(0), $PetscInt(0)] diff --git a/src/snes.jl b/src/snes.jl index 3cee2b8e..56375688 100644 --- a/src/snes.jl +++ b/src/snes.jl @@ -43,18 +43,40 @@ function _snesjac(csnes::CSNES, cx::CVec, cAmat::CMat, cPmat::CMat, ctx::Ptr{Cvo snes.Jeval(cAmat, cPmat, cx) end =# - -@for_libpetsc begin - - function SNES{$PetscScalar}(comm::MPI.Comm; kwargs...) +""" + SNES{PetscScalar}( + ::UnionPetscLib, + comm::MPI.Comm; + snessetfromoptions = true, + options...) + +Initializes a `SNES` nonlinear solver object +""" +function SNES() end + +@for_petsc function SNES{$PetscScalar}( + ::$UnionPetscLib, + comm::MPI.Comm; + snessetfromoptions = true, + options...) + @assert initialized($petsclib) - opts = Options($petsclib, kwargs...) + opts = Options($petsclib, options...) snes = SNES{$PetscScalar, $PetscLib}(C_NULL, opts, nothing, nothing, nothing, nothing, nothing, nothing) - @chk ccall((:SNESCreate, $libpetsc), PetscErrorCode, (MPI.MPI_Comm, Ptr{CSNES}), comm, snes) - + with(snes.opts) do - setfromoptions!(snes) + + @chk ccall( + (:SNESCreate, $petsc_library), + PetscErrorCode, + (MPI.MPI_Comm, + Ptr{CSNES}), + comm, + snes) + end + + snessetfromoptions && setfromoptions!(snes) if comm == MPI.COMM_SELF finalizer(destroy, snes) @@ -62,6 +84,7 @@ end return snes end +@for_libpetsc begin function (::SNESFn{$PetscScalar})(csnes::CSNES, cx::CVec, cfx::CVec, ctx::Ptr{Cvoid})::$PetscInt snes = unsafe_pointer_to_objref(ctx) diff --git a/test/dmda.jl b/test/dmda.jl index 4891f0f7..c250c86c 100644 --- a/test/dmda.jl +++ b/test/dmda.jl @@ -435,38 +435,79 @@ end local_vec = PETSc.createlocalvector(da) global_vec = PETSc.createglobalvector(da) - # Fill everything with some data - fill!(local_vec, mpirank) - fill!(global_vec, mpisize) - - # Add the local values to the global values - PETSc.update!(global_vec, local_vec, PETSc.ADD_VALUES) - - # end points added with neighbor due to ghost of size 1 - bot_val = mpisize + mpirank + (mpirank == 0 ? 0 : mpirank - 1) - top_val = mpisize + mpirank + (mpirank == mpisize - 1 ? 
0 : mpirank + 1) - @test global_vec[corners.lower[1]] == bot_val - @test global_vec[corners.upper[1]] == top_val - - # Center is just self plus the global - for i in (corners.lower[1] + 1):(corners.upper[1] - 1) - @test global_vec[i] == mpisize + mpirank + bot_val = 0; top_val = 0; + for i=1:4 + # Fill everything with some data + fill!(local_vec, mpirank) + fill!(global_vec, mpisize) + + # Add the local values to the global values using the high-level routine + if i==1 + # 1) Use the high-level routine to do this + PETSc.update!(global_vec, local_vec, PETSc.ADD_VALUES) + elseif i==2 + # 2) Use pointer to local vector + PETSc.update!(global_vec, local_vec.ptr, PETSc.ADD_VALUES) + elseif i==3 + # 3) Use pointer to global vector + PETSc.update!(global_vec.ptr, local_vec, PETSc.ADD_VALUES) + elseif i==4 + # 4) Use low-level routines + PETSc.update_local2global!(global_vec.ptr, local_vec.ptr, PETSc.ADD_VALUES, local_vec.dm) + + end + + # end points added with neighbor due to ghost of size 1 + bot_val = mpisize + mpirank + (mpirank == 0 ? 0 : mpirank - 1) + top_val = mpisize + mpirank + (mpirank == mpisize - 1 ? 0 : mpirank + 1) + @test global_vec[corners.lower[1]] == bot_val + @test global_vec[corners.upper[1]] == top_val + + # Center is just self plus the global + for i in (corners.lower[1] + 1):(corners.upper[1] - 1) + @test global_vec[i] == mpisize + mpirank + end end - # reset the local values with the global values - PETSc.update!(local_vec, global_vec, PETSc.INSERT_VALUES) + # Reset the local values with the global values. + # This mainy tests the different local to global update! implementations + for i=1:4 + + # There are different implementations of copying global to local vectors + # That is useful if this is combined with SNES routines, as they only + # return the pointers to the vectors (not the full struct) + if i==1 + # 1) Use the high-level routine to do the copy + PETSc.update!(local_vec, global_vec, PETSc.INSERT_VALUES) - # My first value and my ghost should be the bot/top values - @test local_vec[1] == bot_val - @test local_vec[2] == bot_val - @test local_vec[end - 1] == top_val - @test local_vec[end] == top_val + elseif i==2 + # 2) Use a routine where a pointer to the global vector is mixed with a local vector + PETSc.update!(local_vec, global_vec.ptr, PETSc.INSERT_VALUES) - # interior is just self plus the global - for i in 3:(length(local_vec) - 2) - @test local_vec[i] == mpisize + mpirank + elseif i==3 + # 3) Use a routine where a global vector is mixed with a pointer to a local vector + PETSc.update!(local_vec.ptr, global_vec, PETSc.INSERT_VALUES) + + elseif i==4 + # 4) Use the low-level (pointer) routine to do the same (not) + PETSc.update_global2local!(local_vec.ptr, global_vec.ptr, PETSc.INSERT_VALUES, global_vec.dm); # Note that this is a lower-level function that is not typically used + + end + + # My first value and my ghost should be the bot/top values + @test local_vec[1] == bot_val + @test local_vec[2] == bot_val + @test local_vec[end - 1] == top_val + @test local_vec[end] == top_val + + # interior is just self plus the global + for i in 3:(length(local_vec) - 2) + @test local_vec[i] == mpisize + mpirank + end + end + # Test DM Coordinates coord_da = PETSc.getcoordinateDM(da) # Crank it up to 11! 
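For reference, the `update!` variants exercised in the loops above all follow the same destination-first calling convention; a condensed sketch (vector names as in the test above, illustrative only, not part of the patch):

    # local -> global (e.g. assembling a residual)
    PETSc.update!(global_vec, local_vec, PETSc.ADD_VALUES)
    # global -> local (e.g. before computing on ghosted arrays)
    PETSc.update!(local_vec, global_vec, PETSc.INSERT_VALUES)
    # either argument may also be passed as a raw vector pointer, e.g.:
    PETSc.update!(local_vec, global_vec.ptr, PETSc.INSERT_VALUES)
    PETSc.update!(global_vec.ptr, local_vec, PETSc.ADD_VALUES)

The pointer forms are what the SNES callbacks further below rely on, since PETSc hands those callbacks raw vector pointers rather than the Julia vector structs.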
diff --git a/test/old_test.jl b/test/old_test.jl index 29b41e53..f59593c1 100644 --- a/test/old_test.jl +++ b/test/old_test.jl @@ -18,7 +18,7 @@ PETSc.initialize() w = M*x @test w ≈ S*x - ksp = PETSc.KSP(M; ksp_rtol=1e-8, pc_type="jacobi", ksp_monitor=nothing) + ksp = PETSc.KSP(M; ksp_rtol=1e-8, pc_type="jacobi", ksp_monitor=false) #PETSc.settolerances!(ksp; rtol=1e-8) @test PETSc.gettype(ksp) == "gmres" # default @@ -91,7 +91,7 @@ PETSc.initialize() Base.finalize(x) end - S = PETSc.SNES{Float64}(MPI.COMM_SELF; ksp_rtol=1e-4, pc_type="none") + S = PETSc.SNES{Float64}(PETSc.petsclibs[1],MPI.COMM_SELF; ksp_rtol=1e-4, pc_type="none") PETSc.setfunction!(S, F!, PETSc.VecSeq(zeros(2))) PETSc.setjacobian!(S, updateJ!, PJ, PJ) @test PETSc.solve!([2.0,3.0], S) ≈ [1.0,2.0] rtol=1e-4 diff --git a/test/runtests.jl b/test/runtests.jl index 026d7ce6..1bc753d2 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -24,6 +24,7 @@ include("options.jl") include("dmda.jl") include("old_test.jl") include("test_dmstag.jl") +include("test_snes.jl") # Run the examples to make sure they all work include("examples.jl") diff --git a/test/test_dmstag.jl b/test/test_dmstag.jl index 9c299fef..8bd47555 100644 --- a/test/test_dmstag.jl +++ b/test/test_dmstag.jl @@ -3,9 +3,6 @@ using PETSc, MPI using ForwardDiff, SparseArrays MPI.Initialized() || MPI.Init() -#petsclib = PETSc.petsclibs[1] -#PetscScalar = PETSc.scalartype(petsclib) -#PetscInt = PETSc.inttype(petsclib) @testset "DMStagCreate1d" begin @@ -85,8 +82,8 @@ MPI.Initialized() || MPI.Init() @test corners.size[1]==10 # keyword overrides the specified value @test PETSc.DMStagGetBoundaryTypes(dm_ghosted) == PETSc.DM_BOUNDARY_GHOSTED - ind = PETSc.DMStagGetCentralNodes(dm_ghosted); - @test ind.center.x[3] == 4 + ind = PETSc.DMStagGetIndices(dm_ghosted); + @test ind.center.x[3] == 5 # simple test to retrieve the KSP object # NOTE: need to implement a similar SNES routine @@ -420,26 +417,23 @@ end end - - -#= - # ----------------- -# Example of SNES, with AD jacobian - -@testset "DMSTAG_AND_SNES" begin +# Example of DMStag & SNES with AD jacobian +@testset "DMStag: 1D SNES AD" begin + # Tell AD that it can handle Complex as scalars + ForwardDiff.can_dual(::Type{ComplexF64}) = true + ForwardDiff.can_dual(::Type{ComplexF32}) = true + comm = MPI.COMM_WORLD mpirank = MPI.Comm_rank(comm) mpisize = MPI.Comm_size(comm) - petsclib = PETSc.petsclibs[1] - #for petsclib in PETSc.petsclibs - + for petsclib in PETSc.petsclibs PETSc.initialize(petsclib) PetscScalar = PETSc.scalartype(petsclib) PetscInt = PETSc.inttype(petsclib) - + # Define a struct that holds data we need in the local SNES routines below mutable struct Data_1{PetscScalar,PetscInt} dm @@ -449,50 +443,27 @@ end user_ctx = Data_1{PetscScalar,PetscInt}(nothing, nothing, nothing); # holds data we need in the local - # Construct a 1D test case for a coupled P-T diffusion solver, with 1 DOF @ the center & 1 DOF @ faces - nx = 21; - #user_ctx.dm = PETSc.DMStagCreate1d(petsclib,comm,PETSc.DM_BOUNDARY_NONE,nx,1,1); - user_ctx.dm = PETSc.DMStagCreate1d(petsclib,comm, - PETSc.DM_BOUNDARY_GHOSTED, - nx, - 1, # DOF @ vertex - 1, # DOF @ center - PETSc.DMSTAG_STENCIL_BOX, - 1); # Stencil width - - - x_g = PETSc.createglobalvector(user_ctx.dm) - f_g = PETSc.createglobalvector(user_ctx.dm) - user_ctx.x_l = PETSc.createlocalvector(user_ctx.dm) - user_ctx.f_l = PETSc.createlocalvector(user_ctx.dm) - - - function FormRes!(cfx_g, cx_g, user_ctx) + function FormRes!(ptr_fx_g, ptr_x_g, user_ctx) - - # Note that in PETSc, cx and cfx are 
pointers to global vectors. - - # Copy global to local vectors - @show cx_g user_ctx.x_l - PETSc.update!(user_ctx.x_l, cx_g, PETSc.INSERT_VALUES) - PETSc.update!(user_ctx.f_l, cfx_g, PETSc.INSERT_VALUES) - # PETSc.DMGlobalToLocal(user_ctx.dm, cfx_g, PETSc.INSERT_VALUES, user_ctx.f_l) - + # Note that in PETSc, ptr_x_g and ptr_fx_g are pointers to global vectors. + # Copy global to local vectors that are stored in user_ctx + PETSc.update!(user_ctx.x_l, ptr_x_g, PETSc.INSERT_VALUES) + PETSc.update!(user_ctx.f_l, ptr_fx_g, PETSc.INSERT_VALUES) + # Retrieve arrays from the local vectors ArrayLocal_x = PETSc.DMStagVecGetArrayRead(user_ctx.dm, user_ctx.x_l); # array with all local x-data ArrayLocal_f = PETSc.DMStagVecGetArray(user_ctx.dm, user_ctx.f_l); # array with all local residual - + # Compute local residual ComputeLocalResidual(user_ctx.dm, ArrayLocal_x, ArrayLocal_f, user_ctx) # Finalize local arrays Base.finalize(ArrayLocal_x) Base.finalize(ArrayLocal_f) - + # Copy local into global residual vector - PETSc.update!(cfx_g, user_ctx.f_l, PETSc.INSERT_VALUES) - # PETSc.DMLocalToGlobal(user_ctx.dm,user_ctx.f_l, PETSc.INSERT_VALUES, cfx_g) - + PETSc.update!(ptr_fx_g, user_ctx.f_l, PETSc.INSERT_VALUES) + end function ComputeLocalResidual(dm, ArrayLocal_x, ArrayLocal_f, user_ctx) @@ -505,7 +476,7 @@ end fP = PETSc.DMStagGetGhostArrayLocationSlot(dm,ArrayLocal_f, PETSc.DMSTAG_ELEMENT, 0); # compute the FD stencil - indices = PETSc.DMStagGetCentralNodes(dm); # indices of (center/element) points, not including ghost values. + indices = PETSc.DMStagGetIndices(dm); # indices of (center/element) points, not including ghost values. gc = PETSc.getghostcorners(user_ctx.dm); # start and end of loop including ghost points c = PETSc.getcorners(user_ctx.dm); # start and end of loop including ghost points @@ -516,7 +487,6 @@ end # Nonlinear equation @ nodal points ind = indices.vertex.x; # There is one more "vertex" point - @show ind size(T) i = ind[2:end-1] fT[ind[1]] = T[ind[1] ]-0.5; # left BC fT[ind[end]] = T[ind[end]]-2.0; # right BC @@ -539,13 +509,12 @@ end ArrayLocal_f = PETSc.DMStagVecGetArray(user_ctx.dm, f); # array with all local residual ComputeLocalResidual(user_ctx.dm, ArrayLocal_x, ArrayLocal_f, user_ctx); - # As the residual vector f is linked with ArrayLocal_f, we don't need to pass ArrayLocal_f back return f; end - function FormJacobian!(cx_g, J, P, user_ctx) + function FormJacobian!(ptr_x_g, J, P, user_ctx) # This requires several steps: # @@ -554,235 +523,246 @@ end # this routine requires julia vectors as input) # Extract the local vector - #PETSc.DMGlobalToLocal(user_ctx.dm, cx_g, PETSc.INSERT_VALUES, user_ctx.x_l) - PETSc.update!(user_ctx.x_l, cx_g, PETSc.INSERT_VALUES) - - x = PETSc.unsafe_localarray(Float64, user_ctx.x_l.ptr; write=false, read=true) + PETSc.update!(user_ctx.x_l, ptr_x_g, PETSc.INSERT_VALUES) + x = PETSc.unsafe_localarray(PetscScalar, user_ctx.x_l.ptr; write=false, read=true) f_Residual = (x -> ForwardDiff_res(x, user_ctx)); # pass additional arguments into the routine J_julia = ForwardDiff.jacobian(f_Residual,x); - + # Note: since x is the LOCAL vector, J_julia also ends up having the same size. 
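+ # (The ghosted local vector is longer than the global one, so J_julia also has rows/columns
+ # for ghost points. LocalInGlobalIndices(dm) returns the local positions of the non-ghost,
+ # i.e. global, degrees of freedom, and J_julia[ind,ind] below restricts the AD Jacobian to
+ # the size of the global PETSc matrix J.)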
ind = PETSc.LocalInGlobalIndices(user_ctx.dm); - # @show ind typeof(ind) size(J_julia) sparse(J_julia[ind,ind]) J if PETSc.assembled(J) == false - println("Assembling") J = PETSc.MatSeqAIJ(sparse(J_julia[ind,ind])); - @show J PETSc.assembled(J) else J .= sparse(J_julia[ind,ind]); end + Base.finalize(x_g) return sparse(J_julia[ind,ind]), ind end - # Main SNES part + # Main part + + # Construct a 1D test case for a coupled P-T diffusion solver, with 1 DOF @ the center & 1 DOF @ faces + nx = 21; + user_ctx.dm = PETSc.DMStagCreate1d(petsclib,comm, + PETSc.DM_BOUNDARY_GHOSTED, + nx, + 1, # DOF @ vertex + 1, # DOF @ center + PETSc.DMSTAG_STENCIL_BOX, + 1); # Stencil width + + + x_g = PETSc.createglobalvector(user_ctx.dm) + f_g = PETSc.createglobalvector(user_ctx.dm) + user_ctx.x_l = PETSc.createlocalvector(user_ctx.dm) + user_ctx.f_l = PETSc.createlocalvector(user_ctx.dm) + + + PJ = PETSc.creatematrix(user_ctx.dm); # extract (global) matrix from DMStag J_julia, ind = FormJacobian!(x_g, PJ, PJ, user_ctx) PJ = PETSc.MatSeqAIJ(J_julia) # assemble non-zero structure - - @show PETSc.assembled(PJ) - # @show PJ - - S = PETSc.SNES{Float64}(comm; + S = PETSc.SNES{PetscScalar}(petsclib, comm; snes_rtol=1e-12, + snes_view=false, snes_monitor=true, + ksp_view=true, pc_type="none", - snes_monitor_true_residual=true, - snes_converged_reason=true); + snes_monitor_true_residual=false, + snes_converged_reason=false); S.user_ctx = user_ctx; + PETSc.setfunction!(S, FormRes!, f_g) + PETSc.setjacobian!(S, FormJacobian!, PJ, PJ) + + # Solve + PETSc.solve!(x_g, S); + # @show x_g + + # check + @test x_g[4] ≈ 29.5 + @test x_g[11] ≈ 0.63797 rtol=1e-4 + + PETSc.finalize(petsclib) + + end +end + + - #FormRes!(f_g, x_g, user_ctx) - #FormJacobian!(x_g, PJ, PJ, user_ctx) +@testset "DMStag: 2D SNES AD" begin + comm = MPI.COMM_WORLD + mpirank = MPI.Comm_rank(comm) + mpisize = MPI.Comm_size(comm) + # Tell AD that it can handle Complex as scalars + ForwardDiff.can_dual(::Type{ComplexF64}) = true + ForwardDiff.can_dual(::Type{ComplexF32}) = true + + for petsclib in PETSc.petsclibs + PETSc.initialize(petsclib) + PetscScalar = PETSc.scalartype(petsclib) + PetscInt = PETSc.inttype(petsclib) + + mutable struct Data_2D{PetscScalar,PetscInt} + dm + x_l + f_l + end + user_ctx = Data_2D{PetscScalar,PetscInt}(nothing, nothing, nothing); # holds data we need in the local + + function FormRes!(ptr_fx_g, ptr_x_g, user_ctx) + # Note that in PETSc, ptr_x_g and ptr_fx_g are pointers to global vectors. 
+ + # Copy global to local vectors + PETSc.update!(user_ctx.x_l, ptr_x_g, PETSc.INSERT_VALUES) + PETSc.update!(user_ctx.f_l, ptr_fx_g, PETSc.INSERT_VALUES) + + # Retrieve arrays from the local vectors + ArrayLocal_x = PETSc.DMStagVecGetArrayRead(user_ctx.dm, user_ctx.x_l); # array with all local x-data + ArrayLocal_f = PETSc.DMStagVecGetArray(user_ctx.dm, user_ctx.f_l); # array with all local residual + + # Compute local residual + ComputeLocalResidual(user_ctx.dm, ArrayLocal_x, ArrayLocal_f, user_ctx) + + # Finalize local arrays + Base.finalize(ArrayLocal_x) + Base.finalize(ArrayLocal_f) + + # Copy local into global residual vector + PETSc.update!(ptr_fx_g, user_ctx.f_l, PETSc.INSERT_VALUES) + + end + + function ForwardDiff_res(x, user_ctx) + f = zero(x) # vector of zeros, of same type as x (local vector) + + ArrayLocal_x = PETSc.DMStagVecGetArray(user_ctx.dm, x); # array with all local x-data + ArrayLocal_f = PETSc.DMStagVecGetArray(user_ctx.dm, f); # array with all local residual + + ComputeLocalResidual(user_ctx.dm, ArrayLocal_x, ArrayLocal_f, user_ctx); + + # As the residual vector f is linked with ArrayLocal_f, we don't need to pass ArrayLocal_f back + + return f; + end + + function ComputeLocalResidual(dm, ArrayLocal_x, ArrayLocal_f, user_ctx) + # Compute the local residual. The vectors include ghost points + + # Important! Make sure you retrieve the values from the correct locations. In this example we only have a single DOF at the element center. + T = PETSc.DMStagGetGhostArrayLocationSlot(dm,ArrayLocal_x, PETSc.DMSTAG_ELEMENT, 0); + fT = PETSc.DMStagGetGhostArrayLocationSlot(dm,ArrayLocal_f, PETSc.DMSTAG_ELEMENT, 0); + + # compute the FD stencil + indices = PETSc.DMStagGetIndices(dm); # indices of (center/element) points, not including ghost values. + + sz = size(user_ctx.dm); # array length + dx = 1.0/(sz[1]-1); + dz = 1.0/(sz[2]-1); + + # set ghost points for BCs + bnd = PETSc.DMStagGetBoundaryTypes(user_ctx.dm) + if bnd[1] == PETSc.DM_BOUNDARY_GHOSTED + T[1,:] = T[2,:]; # zero flux; dT/dx=0 + T[end,:] = T[end-1,:]; # zero flux + end + + # Diffusion @ center points + indx = indices.center.x; + indz = indices.center.y; + + ix = indx[1:end] # use ghost points in x (requires a GHOSTED x-boundary) + iz = indz[2:end-1] # center points + + # upper and lower BC (including corners) + fT[indx,indz[1]] = T[indx,indz[1]] .- 0.5; # bottom BC + fT[indx,indz[end]] = T[indx,indz[end]] .- 2.0; # top BC + + + fT[ix,iz] = (T[ix .+ 1,iz] - 2*T[ix,iz] + T[ix .- 1,iz])/dx^2 + + (T[ix,iz .+ 1] - 2*T[ix,iz] + T[ix,iz .- 1])/dz^2 + + end + + function FormJacobian!(ptr_x_g, J, P, user_ctx) + # This requires several steps: + # + # 1) Extract local vector from global solution (x) vector + # 2) Compute local jacobian from the residual routine (note that + # this routine requires julia vectors as input) + + # Extract the local vector + PETSc.update!(user_ctx.x_l, ptr_x_g, PETSc.INSERT_VALUES) + x = PETSc.unsafe_localarray(PetscScalar, user_ctx.x_l.ptr; write=false, read=true) + + f_Residual = (x -> ForwardDiff_res(x, user_ctx)); # pass additional arguments into the routine + J_julia = ForwardDiff.jacobian(f_Residual,x); + + # Note: since x is the LOCAL vector, J_julia also ends up having the same size.
+ ind = PETSc.LocalInGlobalIndices(user_ctx.dm); + if PETSc.assembled(J) == false + J = PETSc.MatSeqAIJ(sparse(J_julia[ind,ind])); + else + J .= sparse(J_julia[ind,ind]); + end + + return sparse(J_julia[ind,ind]), ind, sparse(J_julia) + end + + # Main routine starts here ---- + + dofVertex = 0 + dofEdge = 0 + dofCenter = 1 + nx,nz = 6,25 + user_ctx.dm = PETSc.DMStagCreate2d(petsclib,comm, + PETSc.DM_BOUNDARY_GHOSTED, + PETSc.DM_BOUNDARY_NONE, + nx, + nz, + 1, + 1, + dofVertex, + dofEdge, + dofCenter, + PETSc.DMSTAG_STENCIL_BOX, + 1) + PJ = PETSc.creatematrix(user_ctx.dm); # extract global) matrix from DMStag + x_g = PETSc.createglobalvector(user_ctx.dm) + f_g = PETSc.createglobalvector(user_ctx.dm) + user_ctx.x_l = PETSc.createlocalvector(user_ctx.dm) + user_ctx.f_l = PETSc.createlocalvector(user_ctx.dm) + + S = PETSc.SNES{PetscScalar}(petsclib, comm; + snes_rtol=1e-12, + snes_monitor=true, + pc_type="none", + snes_monitor_true_residual=true, + snes_converged_reason=false); + S.user_ctx = user_ctx; + + J_julia, ind, J_full = FormJacobian!(x_g, PJ, PJ, user_ctx) + PJ = PETSc.MatSeqAIJ(J_julia) # assemble non-zero structure PETSc.setfunction!(S, FormRes!, f_g) PETSc.setjacobian!(S, FormJacobian!, PJ, PJ) - # Solve - PETSc.solve!(x_g, S); + # Solve 2D system + sol = PETSc.solve!(x_g, S); - # check - # @test x_g[4] ≈ 29.5 - # @test x_g[11] ≈ 0.6792 rtol=1e-4 + PETSc.update!(user_ctx.x_l,sol, PETSc.INSERT_VALUES); # copy global solution -> local vector + T2d = PETSc.DMStagGetGhostArrayLocationSlot(user_ctx.dm,user_ctx.x_l, PETSc.DMSTAG_ELEMENT, 0); + + @test T2d[5,5] ≈ 0.75 rtol=1e-3 + # + # ----------------- PETSc.finalize(petsclib) - #end -#end - - - -# -## -## ----------------- -# -# -## ----------------- -## 2D example -#@testset "DMSTAG_AND_SNES_2D" begin -# -#for petsclib in PETSc.petsclibs -# #PETSc.initialize() -# PetscScalar = PETSc.scalartype(petsclib) -# PetscInt = PETSc.inttype(petsclib) -# -#mutable struct Data_1 -# dm -# x_l -# f_l -#end -#user_ctx = Data_1(nothing, nothing, nothing); # holds data we need in the local -# -#dofVertex = 0 -#dofEdge = 0 -#dofCenter = 1 -#nx,nz = 14,25 -#user_ctx.dm = PETSc.DMStagCreate2d(petsclib,comm,PETSc.DM_BOUNDARY_GHOSTED,PETSc.DM_BOUNDARY_NONE,nx,nz,1,1,dofVertex,dofEdge,dofCenter,PETSc.DMSTAG_STENCIL_BOX,1) -#PJ = PETSc.DMCreateMatrix(user_ctx.dm); # extract (global) matrix from DMStag -#x_g = PETSc.DMCreateGlobalVector(user_ctx.dm) -#f_g = PETSc.DMCreateGlobalVector(user_ctx.dm) -#user_ctx.x_l = PETSc.DMCreateLocalVector(user_ctx.dm) -#user_ctx.f_l = PETSc.DMCreateLocalVector(user_ctx.dm) -# -# -#function FormRes!(cfx_g, cx_g, user_ctx) -# -# # Note that in PETSc, cx and cfx are pointers to global vectors. 
-# -# # Copy global to local vectors -# PETSc.DMGlobalToLocal(user_ctx.dm, cx_g, PETSc.INSERT_VALUES, user_ctx.x_l) -# PETSc.DMGlobalToLocal(user_ctx.dm, cfx_g, PETSc.INSERT_VALUES, user_ctx.f_l) -# -# # Retrieve arrays from the local vectors -# ArrayLocal_x = PETSc.DMStagVecGetArrayRead(user_ctx.dm, user_ctx.x_l); # array with all local x-data -# ArrayLocal_f = PETSc.DMStagVecGetArray(user_ctx.dm, user_ctx.f_l); # array with all local residual -# -# # Compute local residual -# ComputeLocalResidual(user_ctx.dm, ArrayLocal_x, ArrayLocal_f, user_ctx) -# -# # Finalize local arrays -# Base.finalize(ArrayLocal_x) -# Base.finalize(ArrayLocal_f) -# -# # Copy local into global residual vector -# PETSc.DMLocalToGlobal(user_ctx.dm,user_ctx.f_l, PETSc.INSERT_VALUES, cfx_g) -# -#end -# -#function ForwardDiff_res(x, user_ctx) -# f = zero(x) # vector of zeros, of same type as x (local vector) -# -# ArrayLocal_x = PETSc.DMStagVecGetArray(user_ctx.dm, x); # array with all local x-data -# ArrayLocal_f = PETSc.DMStagVecGetArray(user_ctx.dm, f); # array with all local residual -# -# ComputeLocalResidual(user_ctx.dm, ArrayLocal_x, ArrayLocal_f, user_ctx); -# -# # As the residual vector f is linked with ArrayLocal_f, we don't need to pass ArrayLocal_f back -# -# return f; -#end -# -#function ComputeLocalResidual(dm, ArrayLocal_x, ArrayLocal_f, user_ctx) -# # Compute the local residual. The vectors include ghost points -# -# T = PETSc.DMStagGetGhostArrayLocationSlot(dm,ArrayLocal_x, PETSc.DMSTAG_LEFT, 0); -# fT = PETSc.DMStagGetGhostArrayLocationSlot(dm,ArrayLocal_f, PETSc.DMSTAG_LEFT, 0); -# -# # P = PETSc.DMStagGetGhostArrayLocationSlot(dm,ArrayLocal_x, PETSc.DMSTAG_ELEMENT, 0); -# # fP = PETSc.DMStagGetGhostArrayLocationSlot(dm,ArrayLocal_f, PETSc.DMSTAG_ELEMENT, 0); -# -# # compute the FD stencil -# sx, sn = PETSc.DMStagGetCentralNodes(dm); # indices of (center/element) points, not including ghost values. 
-# s, n, e = PETSc.DMStagGetCorners(user_ctx.dm); # start and end of loop including ghost points -# -# nT = length(T); # array length -# dx = 1.0/(n[1]-1); -# dz = 1.0/(n[2]-1); -# -# # set Ghost points for BC'S -# bnd = PETSc.DMStagGetBoundaryTypes(user_ctx.dm) -# if bnd[1] == PETSc.DM_BOUNDARY_GHOSTED -# T[1,:] = T[2,:]; # zero flux; dT/dx=0 -# T[end,:] = T[end-1,:]; # zero flux -# -# T[1,:] = T[end-1,:]; # zero flux; dT/dx=0 -# T[end-1,:] = T[end-1,:]; # zero flux -# end -# -# # Diffusion @ center points -# indx = sx[1]:sn[1]; # There is one more "vertex" point -# indz = sx[2]:sn[2]; -# ix = indx[1:end] # use ghost points in x -# iz = indz[2:end-1] -# fT[:,indz[1]] = T[:,indz[1] ] .- 0.5; # bottom BC -# fT[:,indz[end]] = T[:,indz[end]] .- 2.0; # top BC -# -# fT[ix,iz] = (T[ix .+ 1,iz] - 2*T[ix,iz] + T[ix .- 1,iz])/dx^2 + -# (T[ix,iz .+ 1] - 2*T[ix,iz] + T[ix,iz .- 1])/dz^2 -# -# # second, non-coupled, equation @ center points -# #ind = sx[1]:sn[1]+0; # There is one more "vertex" point -# #i = ind[2:end-1] -# #fP[ind[1]] = P[ind[1]]-30.; # left BC -# #fP[ind[end]] = P[ind[end]]-20.; # right BC -# #fP[i] = (P[i .+ 1] - 2*P[i] + P[i .- 1])/dx^2 # steady state diffusion -# -#end -# -#function FormJacobian!(cx_g, J, P, user_ctx) -# # This requires several steps: -# # -# # 1) Extract local vector from global solution (x) vector -# # 2) Compute local jacobian from the residual routine (note that -# # this routine requires julia vectors as input) -# -# # Extract the local vector -# PETSc.DMGlobalToLocal(user_ctx.dm, cx_g, PETSc.INSERT_VALUES, user_ctx.x_l) -# x = PETSc.unsafe_localarray(Float64, user_ctx.x_l.ptr; write=false, read=true) -# -# f_Residual = (x -> ForwardDiff_res(x, user_ctx)); # pass additional arguments into the routine -# -# J_julia = ForwardDiff.jacobian(f_Residual,x); -# -# # Note: since x is the LOCAL vector, J_julia also ends up having the same size. -# ind = PETSc.LocalInGlobalIndices(user_ctx.dm); -# J .= sparse(J_julia[ind,ind]); -# -# return J_julia, ind -#end -# -#S = PETSc.SNES{Float64}(comm; -# snes_rtol=1e-12, -# snes_monitor=true, -# pc_type="none", -# snes_monitor_true_residual=true, -# snes_converged_reason=true); -#S.user_ctx = user_ctx; -# -#PETSc.setfunction!(S, FormRes!, f_g) -#PETSc.setjacobian!(S, FormJacobian!, PJ, PJ) -# -## Solve 2D system -#PETSc.solve!(x_g, S); -# -#T2d = PETSc.DMStagGetGhostArrayLocationSlot(user_ctx.dm,user_ctx.x_l, PETSc.DMSTAG_LEFT, 0); -# -#@test T2d[5,5] ≈ 0.75 rtol=1e-3 -## -## ----------------- -# -## NOT WORKING YET - we do however need this when we run in parallel -##lx = zeros(Int32,1); -##ly = zeros(Int32,1); -##lz = zeros(Int32,1); -##PETSc.DMStagGetOwnershipRanges(dm_1D,lx,ly,lz) -##PETSc.finalize(petsclib) -# -#end -#end -##PETSc.finalize(petsclib) - -=# \ No newline at end of file + + end +end diff --git a/test/test_snes.jl b/test/test_snes.jl index b26aaca2..9a213ace 100644 --- a/test/test_snes.jl +++ b/test/test_snes.jl @@ -1,113 +1,89 @@ using Test using PETSc, MPI, LinearAlgebra, SparseArrays -# This shows some examples of how SNES can be used. -# there are 2 ways to use it: -# 1) you will have julia vectors (but PETSc matrices) within your user-routine -# That is fine for sequential runs. -# 2) you will receive pointers to PETSc vectors within your user routines. 
-# That is important for parallel simulations, where global residual -# and solution vectors are passed to the user-routines, but we work with -# local vectors - - -# Case 1) Julia vectors - -x = ones(2) -# this is an example of a case where julia vectors are passed to the routines -function fn_julia!(fx::Vector, x::Vector, args...) - fx[1] = x[1]^2 + x[1]*x[2] - 3 - fx[2] = x[1]*x[2] + x[2]^2 - 6 -end - -J = zeros(2,2) -PJ = PETSc.MatSeqDense(J) -function update_jac_julia!(x::Vector, args...) - J[1,1] = 2x[1] + x[2] - J[1,2] = x[1] - J[2,1] = x[2] - J[2,2] = x[1] + 2x[2] -end - -S_julia = PETSc.SNES{Float64}(MPI.COMM_SELF; ksp_rtol=1e-4, pc_type="none", ksp_monitor=false, ksp_converged_reason=true) -PETSc.setfunction!(S_julia, fn_julia!, PETSc.VecSeq(zeros(2))) -PETSc.setjacobian!(S_julia, update_jac_julia!, PJ, PJ) -x_julia = PETSc.VecSeq([2.0, 3.0]); -b_julia = PETSc.VecSeq([0.0, 0.0]); -PETSc.solve!(x_julia, S_julia, b_julia) -@test x_julia.array ≈ [1.0,2.0] rtol=1e-4 - - - -# Case 2) PETSc vectors -# this is an example of a case where pointers to PETSc vectors are passed to the routines -function fn!(cfx, cx, user_ctx) - # We could do Global->Local here on cfx/cx, provided a pointer to the local - # vector is available in user_ctx - x = PETSc.unsafe_localarray(Float64, cx; write=false) # read array - fx = PETSc.unsafe_localarray(Float64, cfx; read=false) # write array +MPI.Initialized() || MPI.Init() + +@testset "SNES" begin + + comm = MPI.COMM_WORLD + mpirank = MPI.Comm_rank(comm) + mpisize = MPI.Comm_size(comm) + + + for petsclib in PETSc.petsclibs + PETSc.initialize(petsclib) + PetscScalar = PETSc.scalartype(petsclib) + PetscInt = PETSc.inttype(petsclib) + + # This shows some examples of how SNES can be used. + # Note that you will receive pointers to PETSc vectors within your user routines. + # That is important for parallel simulations, where global residual + # and solution vectors are passed to the user-routines, but we work with + # local vectors + + function fn!(cfx, cx, user_ctx) + # We could do Global->Local here on cfx/cx, provided a pointer to the local + # vector is available in user_ctx + x_in = PETSc.unsafe_localarray(PetscScalar, cx; write=false) # read array + fx_in = PETSc.unsafe_localarray(PetscScalar, cfx; read=false) # write array + + fx_in[1] = x_in[1]^2 + x_in[1]*x_in[2] - 3 + fx_in[2] = x_in[1]*x_in[2] + x_in[2]^2 - 6 + + Base.finalize(fx_in) + Base.finalize(x_in) + end + + + function update_jac!(cx, J1, args...) + x_in = PETSc.unsafe_localarray(PetscScalar, cx; write=false) + J1[1,1] = 2x_in[1] + x_in[2] + J1[1,2] = x_in[1] + J1[2,1] = x_in[2] + J1[2,2] = x_in[1] + 2x_in[2] + + Base.finalize(x_in) + PETSc.assemble(J1) + end + + # structure with which we can pass data to the user-routines above + mutable struct Data + vec + julia + end + + julia_vec = 0; # we want pointers to local vectors + + # NOTE: for some reason, snes_monitor and ksp_monitor is not working if we specify that here. 
+ # To be sorted out why that is (converged_reason does work) + S = PETSc.SNES{PetscScalar}(petsclib,comm; + ksp_rtol=1e-4, + pc_type="none", + ksp_monitor=true, + snes_monitor=true, + snes_converged_reason=false, + ksp_converged_reason=false) + + data = Data(PetscScalar.([100;2]), 1) + S.user_ctx = data; # we can pack anything we need in this struct + + PJ = PETSc.MatSeqDense(zeros(PetscScalar,(2,2))) + PETSc.setfunction!(S, fn!, PETSc.VecSeq( PetscScalar.(zeros(2)))) + PETSc.setjacobian!(S, update_jac!, PJ, PJ) - fx[1] = x[1]^2 + x[1]*x[2] - 3 - fx[2] = x[1]*x[2] + x[2]^2 - 6 - - Base.finalize(fx) - Base.finalize(x) -end - + x = PETSc.VecSeq(PetscScalar.([2.0, 3.0])); + b = PETSc.VecSeq(PetscScalar.([0.0, 0.0])); + PETSc.solve!(x, S, b) -function update_jac!(cx, J1::PETSc.AbstractMat{Float64}, args...) - x = PETSc.unsafe_localarray(Float64, cx; write=false) - @show J1, typeof(J1) - J1[1,1] = 2x[1] + x[2] - J1[1,2] = x[1] - J1[2,1] = x[2] - J1[2,2] = x[1] + 2x[2] + sol = PETSc.unsafe_localarray(PetscScalar, x.ptr; read=false) + @test sol ≈ [1.0,2.0] rtol=1e-4 - Base.finalize(x) - PETSc.assemble(J1) -end + # cleanup + PETSc.destroy(x); + PETSc.destroy(b); + PETSc.destroy(PJ); + #PETSc.destroy(S); -# structure with which we can pass data to the user-routines above -mutable struct Data - vec - julia + PETSc.finalize(petsclib) + end end - -julia_vec = 0; # we want pointers to local vectors -S = PETSc.SNES{Float64}(MPI.COMM_SELF, julia_vec; - ksp_rtol=1e-4, - pc_type="none", - ksp_monitor=true, - ksp_converged_reason=true) - - -data = Data([100;2], 1) -S.user_ctx = data; # we can pack anything we need in this struct - - -PETSc.setfunction!(S, fn!, PETSc.VecSeq(zeros(2))) -PJ = PETSc.MatSeqDense(zeros(2,2)) -PETSc.setjacobian!(S, update_jac!, PJ, PJ) -x = PETSc.VecSeq([2.0, 3.0]); -b = PETSc.VecSeq([0.0, 0.0]); -PETSc.solve!(x, S, b) -@test x.array ≈ [1.0,2.0] rtol=1e-4 - - -# cleanup -#finalize(x) -#finalize(J); -#PETSc.destroy(x0); -#PETSc.destroy(x); -#PETSc.destroy(b); -#PETSc.destroy(PJ); -#PETSc.destroy(S); - - - - -#include("test_dmstag.jl") - - - -#PETSc.finalize() \ No newline at end of file
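Finally, a condensed sketch of the SNES workflow that the rewritten test exercises (illustrative only, not part of the patch; it assumes the first available PETSc library uses Float64 scalars, as in old_test.jl above, and that `fn!` and `update_jac!` are pointer-based callbacks like the ones defined in test_snes.jl):

    using PETSc, MPI
    MPI.Initialized() || MPI.Init()

    petsclib = PETSc.petsclibs[1]
    PETSc.initialize(petsclib)

    S  = PETSc.SNES{Float64}(petsclib, MPI.COMM_SELF; ksp_rtol=1e-4, pc_type="none")
    PJ = PETSc.MatSeqDense(zeros(2, 2))

    PETSc.setfunction!(S, fn!, PETSc.VecSeq(zeros(2)))   # fn!(cfx, cx, user_ctx) works on raw vector pointers
    PETSc.setjacobian!(S, update_jac!, PJ, PJ)

    x = PETSc.VecSeq([2.0, 3.0])                         # initial guess
    b = PETSc.VecSeq([0.0, 0.0])
    PETSc.solve!(x, S, b)                                # x now holds the converged solution

    PETSc.finalize(petsclib)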