Skip to content

Commit

Permalink
Merge branch 'master' of github.com:gridapapps/GridapMHD.jl into solid_coupling
Browse files Browse the repository at this point in the history
  • Loading branch information
pmartorell committed Jul 24, 2024
2 parents 9ea6729 + 1e43c4d commit cf34acb
Show file tree
Hide file tree
Showing 25 changed files with 1,820 additions and 126 deletions.
1 change: 1 addition & 0 deletions Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ BlockArrays = "8e7c35d0-a365-5155-bbbb-fb81a777f24e"
DrWatson = "634d3b9d-ee7a-5ddf-bec9-22491ea816e1"
FileIO = "5789e2e9-d7fb-5bc7-8068-2c6fae9b9549"
FlameGraphs = "08572546-2f56-4bcf-ba4e-bab62c3a3f89"
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
Gridap = "56d4f2e9-7ea1-5844-9cf6-b9c51ca7ce8e"
GridapDistributed = "f9701e48-63b3-45aa-9a63-9bc6c271f355"
GridapGmsh = "3025c34a-b394-11e9-2a55-3fee550c04c8"
Expand Down
72 changes: 72 additions & 0 deletions analysis/hercules/channels.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
using DrWatson
include("scripts/mpi_scripts.jl")

# Worker-side preamble: Julia source (kept as a string) that `taskcmd` splices
# into the program each MPI rank executes, loading the PETSc/CSR dependencies.
setup = "
using SparseMatricesCSR
using GridapPETSc
using GridapMHD: snes_postpro"
# PETSc/MUMPS direct-solver configuration, serialized as a string so it can be
# interpolated into the worker program (see `taskcmd` in scripts/mpi_scripts.jl).
# NOTE(review): `run_channel` below passes `solver=":petsc"` instead of this
# Dict, so this binding appears unused in this file — confirm intent.
solver = "Dict(
:solver => :petsc,
:matrix_type => SparseMatrixCSR{0,PetscScalar,PetscInt},
:vector_type => Vector{PetscScalar},
:solver_postpro => ((cache,info) -> snes_postpro(cache,info)),
:petsc_options => \"-snes_monitor -ksp_error_if_not_converged true -ksp_converged_reason -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -mat_mumps_icntl_28 1 -mat_mumps_icntl_29 2 -mat_mumps_icntl_4 3 -mat_mumps_cntl_1 0.001\",
:niter => 100,
:rtol => 1e-5,
)"

# Dict entry point: convert the parameter Dict to keyword arguments
# (DrWatson's `dict2ntuple`) and forward to the keyword method.
run_channel(dict) = run_channel(; dict2ntuple(dict)...)

"""
    run_channel(; force=false, L=1, w=0.1, kwargs...)

Run one channel simulation described by the keyword arguments (processor
layout `np`, cells `nc`, `Ha`, `Re`, `niter`, `convection`, ...).  The job
name is derived from the parameters; if the output `.pvtu` file already
exists the run is skipped unless `force=true`.
"""
function run_channel(;force=false,L=1,w=0.1,kwargs...)

  @unpack np,nc,Ha,Re,niter,convection = kwargs
  # Honor :initial_value from the sweep params when given; default to the
  # previously hardcoded :zero for backward compatibility.
  initial_value = get(kwargs, :initial_value, :zero)

  N = prod(nc)
  prefix = "channel_N$N"

  # Job name encodes every parameter except the processor layout, so a
  # finished run can be detected by its output file.
  jobname = savename(prefix,kwargs,equals="",ignores=["np"])
  pvtu = datadir("$jobname.pvtu")
  if !force && isfile(pvtu)
    @info "Skipping $jobname"
    return
  else
    @info "Running $jobname"
  end

  run_driver(
    app="channel",
    setup=setup,
    nc=nc,
    np=np,
    backend=:mpi,
    sizes=(L,w,w),
    ν=1,
    B0=Ha/w,
    u0=Re/w,
    bl_orders=(1,2,2),
    solver=":petsc",
    niter=niter,                  # was hardcoded to 1, silently ignoring the unpacked kwarg
    inlet=:shercliff,
    initial_value=initial_value,  # was hardcoded :zero, contradicting the jobname
    convection=convection,
    title=jobname,
    vtk=true)
end


# Parameter sweep: one channel run per combination produced by `dict_list`.
params = Dict(
    :Ha => [100,],
    :Re => 10,
    :niter => 1,
    :np => (2,4,2),
    :nc => [(10,10,10),],
    :convection => true,
    :initial_value => :inlet,
    :force => false
)

foreach(run_channel, dict_list(params))
73 changes: 73 additions & 0 deletions analysis/hercules/expansions.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@

using DrWatson
include("scripts/mpi_scripts.jl")

# Worker-side preamble: Julia source (kept as a string) that `taskcmd` splices
# into the program each MPI rank executes, loading the app and PETSc deps.
setup = "
using GridapMHD: expansion
using SparseMatricesCSR
using GridapPETSc
using GridapMHD: snes_postpro"
# PETSc/MUMPS direct-solver configuration, serialized as a string; it is passed
# verbatim to `run_driver` below and interpolated into the worker program.
solver = "Dict(
:solver => :petsc,
:matrix_type => SparseMatrixCSR{0,PetscScalar,PetscInt},
:vector_type => Vector{PetscScalar},
:solver_postpro => ((cache,info) -> snes_postpro(cache,info)),
:petsc_options => \"-snes_monitor -ksp_error_if_not_converged true -ksp_converged_reason -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -mat_mumps_icntl_28 1 -mat_mumps_icntl_29 2 -mat_mumps_icntl_4 3 -mat_mumps_cntl_1 0.001\",
:niter => 100,
:rtol => 1e-5,
)"

# Dict entry point: convert the parameter Dict to keyword arguments
# (DrWatson's `dict2ntuple`) and forward to the keyword method.
run_expansion(dict) = run_expansion(; dict2ntuple(dict)...)

"""
    run_expansion(; force=false, kwargs...)

Run one expansion simulation described by the keyword arguments (`np`,
`mesh`, `Ha`, `Re`, `niter`, `convection`).  The job name is derived from
the parameters; if the output `.pvtu` file already exists the run is
skipped unless `force=true`.
"""
function run_expansion(; force=false, kwargs...)

    jobname = savename("expansion", kwargs, equals="", ignores=["np"])
    @unpack np, mesh, Ha, Re, niter, convection = kwargs

    # A completed run leaves a .pvtu file behind; reuse it unless forced.
    if !force && isfile(datadir("$jobname.pvtu"))
        @info "Skipping $jobname"
        return
    end
    @info "Running $jobname"

    run_driver(
        app = "expansion",
        setup = setup,
        mesh = mesh,
        np = np,
        backend = :mpi,
        Ha = Ha,
        N = Ha^2/Re,
        cw = 0.0,
        Z = 4.0,
        b = 1.0,
        inlet = :shercliff,
        debug = false,
        vtk = true,
        title = jobname,
        solver = solver,
        initial_value = :zero,
        niter = niter,
        convection = convection)
end



# Parameter sweep over Hartmann numbers and meshes; each combination
# produced by `dict_list` yields one run.
params = Dict(
    :Ha => [10,100],
    :Re => 1,
    :niter => 1,
    :np => 16,
    :mesh => ["710","6k"],
    :convection => false,
)

foreach(run_expansion, dict_list(params))
78 changes: 78 additions & 0 deletions analysis/hercules/nonuniform_B.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
using DrWatson
include("scripts/mpi_scripts.jl")

# Worker-side preamble: Julia source (kept as a string) that `taskcmd` splices
# into the program each MPI rank executes, loading the PETSc/CSR dependencies.
setup = "
using SparseMatricesCSR
using GridapPETSc
using GridapMHD: snes_postpro"
# PETSc/MUMPS direct-solver configuration, serialized as a string.
# NOTE(review): `run_channel` below passes `solver=":petsc"` instead of this
# Dict, so this binding appears unused in this file — confirm intent.
solver = "Dict(
:solver => :petsc,
:matrix_type => SparseMatrixCSR{0,PetscScalar,PetscInt},
:vector_type => Vector{PetscScalar},
:solver_postpro => ((cache,info) -> snes_postpro(cache,info)),
:petsc_options => \"-snes_monitor -ksp_error_if_not_converged true -ksp_converged_reason -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -mat_mumps_icntl_28 1 -mat_mumps_icntl_29 2 -mat_mumps_icntl_4 3 -mat_mumps_cntl_1 0.001\",
:niter => 100,
:rtol => 1e-5,
)"

# Dict entry point: convert the parameter Dict to keyword arguments
# (DrWatson's `dict2ntuple`) and forward to the keyword method.
run_channel(dict) = run_channel(; dict2ntuple(dict)...)

# Run one channel simulation with a non-uniform magnetic field.  The job name
# is derived from the parameters; if the output .pvtu file already exists the
# run is skipped unless `force=true`.
function run_channel(;force=false,L=1,w=0.1,kwargs...)

# γ is forwarded to the driver as γB; presumably it parametrizes the
# non-uniform field profile — TODO confirm against the channel driver.
@unpack np,nc,Ha,Re,niter,convection,nonuniform_B,γ = kwargs

N = prod(nc)
prefix = "channel_N$N"

# Job name encodes every parameter except the processor layout, so a
# finished run can be detected by its output file.
jobname = savename(prefix,kwargs,equals="",ignores=["np"])
pvtu = datadir("$jobname.pvtu")
if !force && isfile(pvtu)
@info "Skipping $jobname"
return
else
@info "Running $jobname"
end

run_driver(
app="channel",
setup=setup,
nc=nc,
np=np,
backend=:mpi,
sizes=(L,w,w),
ν=1,
B0=Ha/w,
u0=Re/w,
bl_orders=(1,2,2),
solver=":petsc",
niter=10, # NOTE(review): hardcoded; the unpacked `niter` kwarg (set to `nothing` in params below) is ignored — confirm intent.
inlet=:shercliff,
initial_value=:zero, # NOTE(review): params below request :initial_value => :inlet, but :zero is hardcoded here — confirm intent.
convection=convection,
nonuniform_B=nonuniform_B,
γB=γ,
title=jobname,
vtk=true)
end


# Parameter sweep for the non-uniform-B channel study.
# NOTE(review): the original literal was not valid Julia — `L=30` and `w=2`
# are not legal `Dict` arguments, and the `:γ` key was missing in front of
# `=> 0.45` (run_channel unpacks `γ` from the kwargs, so `:γ` is the key).
params = Dict(
  :Ha => [100],
  :Re => [10],
  :niter => nothing,
  :np => (4,2,2),
  :nc => [(40,20,10)],
  :L => 30,
  :w => 2,
  :nonuniform_B => true,
  :γ => 0.45,
  :convection => true,
  :initial_value => :inlet,
  :force => true,
)

for p in dict_list(params)
  run_channel(p)
end
57 changes: 57 additions & 0 deletions analysis/hercules/scripts/mpi_scripts.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
using MPI

# Absolute path to the repository root (three directory levels above this script).
projectdir = joinpath(@__DIR__,"..","..","..") |> abspath
# Path to a prebuilt GridapMHD system image.
# NOTE(review): `run_driver` currently launches workers with `--project` only,
# so this system image is not used — confirm whether `-J` is still wanted.
sysimage = joinpath(projectdir,"GridapMHD.so")

"""
    num_procs(; np, backend, kwargs...)

Total number of MPI ranks implied by the processor layout `np`
(a tuple or an integer).  Only the `:mpi` backend is supported.
"""
num_procs(; np, backend, kwargs...) = (@assert backend == :mpi; prod(np))

# Build, as a string, the Julia program each MPI rank will execute: it
# initializes MPI, prints the rank count from rank 0, loads the requested app
# and the user-supplied `setup` code, optionally runs a warm-up call (`prec`),
# and finally invokes the app with the rendered keyword arguments.
function taskcmd(;app="channel",precompile=false,setup="",solver=":julia",kwargs...)
kw = str_kwargs(;kwargs...)        # remaining kwargs rendered as "k = v, ..." source text
prec = precomp(;precompile,kwargs...)  # optional warm-up call; empty string when disabled
str = "
using MPI
MPI.Init()
if MPI.Comm_rank(MPI.COMM_WORLD) == 0
println(\"Num Procs:\",MPI.Comm_size(MPI.COMM_WORLD))
end
using GridapMHD: $app
$setup
solver = $solver
$(prec)
@time $app(;$kw,solver=solver)
"
str
end

"""
    precomp(; precompile, app="channel", np=(1,1,1), nc=np, solver=:julia, kwargs...)

Return the source text of a warm-up call for `app` (a tiny run on `np .* 2`
cells), or the empty string when `precompile` is false.
Note: the `nc` keyword is accepted but the warm-up always uses `np .* 2`.
"""
function precomp(; precompile, app="channel", np=(1,1,1), nc=np, solver=:julia, kwargs...)
    precompile || return ""
    warm_nc = np .* 2
    return "@time $app(;np=$np,nc=$(warm_nc),solver=:$solver,backend=:mpi)"
end

"""
    str_kwargs(; tabsize=4, kwargs...)

Render the keyword arguments as `"k1 = v1, k2 = v2"` source text suitable
for splicing into a generated keyword-argument call (see `taskcmd`).
Returns the empty string when no kwargs are given.

`tabsize` is retained for backward compatibility but is unused.
"""
function str_kwargs(;tabsize=4,kwargs...)
    # The original body computed `NamedTuple(kwargs)` and a `tab` string whose
    # results were discarded; that dead code has been removed.
    isempty(kwargs) && return ""  # slicing "NamedTuple()" would yield garbage
    str = string(NamedTuple(kwargs))
    # Strip the surrounding parentheses of the NamedTuple printout.
    return str[2:end-1]
end

"""
    run_driver(; kwargs...)

Launch the requested app under `mpiexec`: build the per-rank program with
`taskcmd`, derive the rank count with `num_procs`, and run the command.
"""
function run_driver(;kwargs...)
  # NOTE(review): earlier revisions launched with `-O3 --check-bounds=no` and
  # the prebuilt sysimage (`-J$sysimage`), but those assignments were
  # immediately overwritten (dead code) and have been removed; only the
  # project flag is active.
  args = `--project=$projectdir`
  MPI.mpiexec() do cmd
    n = num_procs(;kwargs...)
    task = taskcmd(;kwargs...)
    _cmd = `$(cmd) -np $n --oversubscribe $(Base.julia_cmd()) $args -e $task`
    @show _cmd
    run(_cmd)
  end
end
30 changes: 30 additions & 0 deletions analysis/timestepping/RunSpatialConv.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
module SpaceConv
# Spatial-convergence study: run the transient driver over a sweep of mesh
# resolutions `n` for each manufactured solution, at a fixed time step.

using GridapMHD
using GridapMHD: transient
using GridapPETSc, SparseMatricesCSR
using DrWatson

tf = 1.0
man_solution = [:lineartime_nonfespace,:stationary_nonfespace]
Δt = 0.5
n = 2 .^ (1:4)

params = ntuple2dict((;Δt,tf,man_solution,n))
all_params = dict_list(params)

prefix = "transient"
default = (;vtk=false)

banner = "--------------------------------------"
for case in all_params
    title = savename(prefix, case)
    println(banner)
    println("Running $title")
    println(banner)
    transient(;title, default..., case...)
end

end # module
30 changes: 30 additions & 0 deletions analysis/timestepping/RunTransientConv.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
module TransientConv

using GridapMHD
using GridapMHD: transient
using GridapPETSc, SparseMatricesCSR
using DrWatson

n = 2
tf = 1.0
man_solution = [:nonlineartime_fespace]
Δt = 2. .^ -(0:4)

params = ntuple2dict((;Δt,tf,man_solution,n))

all_params = dict_list(params)

prefix = "transient"

default = (;vtk=false)

for p in all_params
title = savename(prefix,p)
println("--------------------------------------")
println("Running $title")
println("--------------------------------------")
transient(;title,default...,p...)
end


end # module
Loading

0 comments on commit cf34acb

Please sign in to comment.