Commit

Merge branch 'master' of github.com:gridapapps/GridapMHD.jl into develop

JordiManyer committed Jul 27, 2024
2 parents b4fc573 + cf73820 commit a364e65
Showing 76 changed files with 239,731 additions and 90,166 deletions.
4 changes: 3 additions & 1 deletion .gitignore
@@ -14,4 +14,6 @@ Manifest.toml
 LocalPreferences.toml
 /dev/
 /docs/build/
-/docs/site/
+/docs/site/
+*.so
+.vscode
7 changes: 6 additions & 1 deletion Project.toml
@@ -9,6 +9,7 @@ BlockArrays = "8e7c35d0-a365-5155-bbbb-fb81a777f24e"
 DrWatson = "634d3b9d-ee7a-5ddf-bec9-22491ea816e1"
 FileIO = "5789e2e9-d7fb-5bc7-8068-2c6fae9b9549"
 FlameGraphs = "08572546-2f56-4bcf-ba4e-bab62c3a3f89"
+ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
 Gridap = "56d4f2e9-7ea1-5844-9cf6-b9c51ca7ce8e"
 GridapDistributed = "f9701e48-63b3-45aa-9a63-9bc6c271f355"
 GridapGmsh = "3025c34a-b394-11e9-2a55-3fee550c04c8"
@@ -28,7 +29,11 @@ gmsh_jll = "630162c2-fc9b-58b3-9910-8442a8a132e6"
 [compat]
 BSON = "0.3"
 FileIO = "1"
-GridapDistributed = "0.3.1, 0.4"
+Gridap = "0.18"
+GridapDistributed = "0.4.0"
+GridapP4est = "0.3.7"
+GridapSolvers = "0.3.0"
+GridapPETSc = "0.5.0"
 MPI = "0.20"
 PackageCompiler = "2"
 SparseMatricesCSR = "0.6.6"
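A note on the [compat] block above, following standard Julia Pkg semantics (general background, not taken from this commit): each entry is a caret bound, so GridapDistributed = "0.4.0" admits versions in [0.4.0, 0.5.0), while the removed "0.3.1, 0.4" also admitted the 0.3 series from 0.3.1 on. The commit therefore drops GridapDistributed 0.3 support and pins explicit bounds for Gridap, GridapP4est, GridapSolvers and GridapPETSc.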
11 changes: 7 additions & 4 deletions analysis/Turgalium_CIEMAT/env.sh
@@ -2,13 +2,16 @@ module load openmpi_gcc/4.0.4
 module load gcc/11.2.0
 
 
-export PETSCROOT=/home/froca/software/petsc/3.18.0_new
-export JULIA_PETSC_LIBRARY=/home/froca/software/petsc/3.18.0_new/lib/libpetsc.so
+export PETSCROOT=/home/froca/software/petsc/3.19.3
+export JULIA_PETSC_LIBRARY=/home/froca/software/petsc/3.19.3/lib/libpetsc.so
 export JULIA_MPI_BINARY=system
-export JULIA_MPI_PATH=/opt/openmpi_gcc/4.0.4
+export JULIA_MPI_PATH=/apps/openmpi_mlnx/4.0.4
 export GMSHROOT=/home/froca/software/gmsh/4.10.5
 export GRIDAPMHD=/ws/blankets/GridapMHD.jl
 
+export OMPI_MCA_btl='^openib'
+export OMPI_MCA_opal_warn_on_missing_libcuda=0
 
 
 #export UCX_WARN_UNUSED_ENV_VARS=n
 #export HCOLL_ML_DISABLE_SCATTERV=1
 #export HCOLL_ML_DISABLE_BCAST=1
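For context on the two exports added above: in Open MPI, a leading '^' in an MCA component list means exclusion. A minimal annotated sketch (the per-run mpiexec form is an illustration, not taken from this repo):

export OMPI_MCA_btl='^openib'                    # use any BTL transport except the legacy openib component
export OMPI_MCA_opal_warn_on_missing_libcuda=0   # silence the warning when CUDA-aware Open MPI finds no libcuda
# equivalent per-run form instead of exporting:
# mpiexec --mca btl '^openib' -n 4 ./app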
50 changes: 33 additions & 17 deletions analysis/Turgalium_CIEMAT/sendExpansion.sh
@@ -1,16 +1,17 @@
 #!/bin/bash
-#SBATCH -N 2
-#SBATCH --ntasks-per-node=1
-#SBATCH -t 24:00:00
-#SBATCH --partition=volta
+#SBATCH -N 4
+#SBATCH --ntasks-per-node=4
+#SBATCH -t 48:00:00
+#SBATCH --partition=cpu36c
 
-#SBATCH -o outputExp_Ha50_ser
-#SBATCH -e errorExp_Ha50_ser
+#SBATCH -o outputExp_Ha100_tau5000_N4n4
+#SBATCH -e errorExp_Ha100_tau5000_N4n4
 ###SBATCH [email protected]
-#SBATCH --job-name=Exp_Ha50_ser
+#SBATCH --job-name=Exp_Ha100_tau5000_N4n4
 #SBATCH --mem=0
 
 SLURM_NPROCS=`expr $SLURM_JOB_NUM_NODES \* $SLURM_NTASKS_PER_NODE`
 #SLURM_NPROCS = '16'
 
 srun hostname -s > hosts.$SLURM_JOB_ID
 echo "================================================================"
@@ -26,28 +27,44 @@ source env.sh
 #export OMPI_MCA_btl_openib_allow_ib=1
 #export OMPI_MCA_btl_openib_if_include="mlx5_0:1"
 
-#mpiexec -n ${SLURM_NPROCS} julia --project=.. -J ../GridapMHD.so -O3 --check-bounds=no -e\
-mpiexec -n ${SLURM_NPROCS} julia --project=$GRIDAPMHD -J $GRIDAPMHD/compile/Turgalium_CIEMAT/GridapMHD36c.so -O3 --check-bounds=no -e\
+#Generate a file for passing some SLURM parameters to julia
+
+PASS_FILE="pass_params.jl"
+
+echo NPROCS=$SLURM_NPROCS > $PASS_FILE
+echo JOB_NAME=\"$SLURM_JOB_NAME\" >> $PASS_FILE
+Ha=${SLURM_JOB_NAME##*Ha}
+Ha=${Ha%_tau*}
+echo Hartmann=${Ha}.0 >> $PASS_FILE
+tau=${SLURM_JOB_NAME##*tau}
+tau=${tau%_*}
+echo Tau=${tau}.0 >> $PASS_FILE
+
+
+mpiexec -n ${SLURM_NPROCS} julia --project=$GRIDAPMHD -J $GRIDAPMHD/compile/Turgalium_CIEMAT/GridapMHD36c.so -O3 --check-bounds=no -e\
 '
+include("pass_params.jl")
 using GridapMHD: expansion
 expansion(;
 mesh="68k",
-np=2,
+np=NPROCS,
 backend=:mpi,
-Ha = 50.0,
+Ha = Hartmann,
 N = 3740.0,
-cw = 0.01,
+cw = 0.028,
+τ = Tau,
+inlet = :parabolic,
 debug=false,
 vtk=true,
-title="Expansion_Ha50_serial",
-solver=:julia,
-# petsc_options="-snes_monitor -ksp_error_if_not_converged true -ksp_converged_reason -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -mat_mumps_icntl_7 0",
+title=JOB_NAME,
+solver=:petsc,
+petsc_options="-snes_monitor -ksp_error_if_not_converged true -ksp_converged_reason -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -mat_mumps_icntl_7 0 -mat_mumps_icntl_28 1 -mat_mumps_icntl_29 2 -mat_mumps_icntl_4 3 -mat_mumps_cntl_1 0.001"
 )'
 
 
 
 duration=$SECONDS
 rm -f hosts.$SLURM_JOB_ID
+rm -f pass_params.jl
 
 STATUS=$?
 echo "================================================================"
@@ -56,4 +73,3 @@ echo "================================================================"
echo ""
echo "STATUS = $STATUS"
echo ""

64 changes: 64 additions & 0 deletions analysis/Turgalium_CIEMAT/sendFullyDeveloped.sh
@@ -0,0 +1,64 @@
#!/bin/bash
#SBATCH -N 1
#SBATCH --ntasks-per-node=4
#SBATCH -t 10:00:00
#SBATCH --partition=volta

#SBATCH -o outputFD_test
#SBATCH -e errorFD_test
#SBATCH --job-name=FD_test
#SBATCH --mem=0


SLURM_NPROCS=`expr $SLURM_JOB_NUM_NODES \* $SLURM_NTASKS_PER_NODE`

srun hostname -s > hosts.$SLURM_JOB_ID
echo "================================================================"
hostname
echo "Using: ${SLURM_NPROCS} procs in ${SLURM_JOB_NUM_NODES} nodes"
echo "================================================================"
echo ""


SECONDS=0

source env.sh
#export OMPI_MCA_btl_openib_allow_ib=1
#export OMPI_MCA_btl_openib_if_include="mlx5_0:1"

#mpiexec -n ${SLURM_NPROCS} julia --project=$GRIDAPMHD -J $GRIDAPMHD/compile/Turgalium_CIEMAT/GridapMHD36c.so -O3 --check-bounds=no -e\
mpiexec -n ${SLURM_NPROCS} julia --project=$GRIDAPMHD -O3 --check-bounds=no -e\
'
using GridapMHD:FullyDeveloped
FullyDeveloped(
nc=(60,60),
np=(2,2),
backend=:mpi,
Ha = 1000.0,
b = 1.5,
dir_B = (0.0,1.0,0.0),
cw_s = 0.01,
τ_Ha = 1e5,
cw_Ha = 0.01,
τ_s = 1e5,
nsums = 100,
debug = false,
vtk = true,
title="FD_test",
mesh = false,
solver=:petsc,
petsc_options="-snes_monitor -ksp_error_if_not_converged true -ksp_converged_reason -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -mat_mumps_icntl_28 1 -mat_mumps_icntl_29 2 -mat_mumps_icntl_4 3 -mat_mumps_cntl_1 0.001"
)'

duration=$SECONDS
rm -f hosts.$SLURM_JOB_ID
#rm -f $MACHINE_FILE

STATUS=$?
echo "================================================================"
echo "$(($duration / 60)) minutes and $(($duration % 60)) seconds elapsed."
echo "================================================================"
echo ""
echo "STATUS = $STATUS"
echo ""

31 changes: 15 additions & 16 deletions analysis/Turgalium_CIEMAT/sendHunt.sh
@@ -1,13 +1,13 @@
 #!/bin/bash
-#SBATCH -N 1
-#SBATCH --ntasks-per-node=1
-#SBATCH -t 20:00:00
+#SBATCH -N 1
+#SBATCH --ntasks-per-node=4
+#SBATCH -t 00:10:00
 #SBATCH --partition=cpu36c
 
-#SBATCH -o outputHunt_Ha500_mumps_n1
-#SBATCH -e errorHunt_Ha500_mumps_n1
+#SBATCH -o outputHunt_Ha20
+#SBATCH -e errorHunt_Ha20
 ###SBATCH [email protected]
-#SBATCH --job-name=Hunt_mumps_500
+#SBATCH --job-name=Hunt_Ha20
 #SBATCH --mem=0
 
 
@@ -33,25 +33,24 @@ source env.sh
 #export OMPI_MCA_btl_openib_allow_ib=1
 #export OMPI_MCA_btl_openib_if_include="mlx5_0:1"
 
-#mpiexec -n ${SLURM_NPROCS} --mca btl_openib_allow_ib 1 --mca btl_openib_if_include mlx5_0 julia --project=$GRIDAPMHD -J $GRIDAPMHD/GridapMHD.so -O3 --check-bounds=no -e\
-julia --project=$GRIDAPMHD -J $GRIDAPMHD/compile/Turgalium_CIEMAT/GridapMHD36c.so -O3 --check-bounds=no -e\
+mpiexec -n ${SLURM_NPROCS} julia --project=$GRIDAPMHD -J $GRIDAPMHD/compile/Turgalium_CIEMAT/GridapMHD36c.so -O3 --check-bounds=no -e\
 '
 using GridapMHD: hunt
 hunt(
-nc=(50,50),
-# np=(2,2),
-# backend=:mpi,
+nc=(20,20),
+np=(2,2),
+backend=:mpi,
 L=1.0,
-B=(0.,500.,0.),
-nsums = 1000,
+B=(0.,20.,0.),
+nsums = 100,
 debug=false,
 vtk=true,
-title="hunt_500_petsc_n1",
+title="hunt_Ha20",
 mesh = false,
 BL_adapted = true,
 solver=:petsc,
-petsc_options="-snes_monitor -ksp_error_if_not_converged true -ksp_converged_reason -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps",
-)'
+petsc_options="-snes_monitor -ksp_error_if_not_converged true -ksp_converged_reason -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -mat_mumps_icntl_28 1 -mat_mumps_icntl_29 2 -mat_mumps_icntl_4 3 -mat_mumps_cntl_1 0.001"
+)'



109 changes: 109 additions & 0 deletions analysis/Turgalium_CIEMAT/sendTube.sh
@@ -0,0 +1,109 @@
#!/bin/bash
#SBATCH -N 1
#SBATCH --ntasks-per-node=12
#SBATCH -t 90:00:00
#SBATCH --partition=volta

#SBATCH -o outputTube
#SBATCH -e errorTube
#SBATCH --job-name=Tube
#SBATCH --mem=0

SLURM_NPROCS=`expr $SLURM_JOB_NUM_NODES \* $SLURM_NTASKS_PER_NODE`
#SLURM_NPROCS = '16'

srun hostname -s > hosts.$SLURM_JOB_ID
echo "================================================================"
hostname
echo "Using: ${SLURM_NPROCS} procs in ${SLURM_JOB_NUM_NODES} nodes"
echo "================================================================"
echo ""


SECONDS=0

#Load the environment
source env.sh

#Generate a file for passing some SLURM parameters to julia

PASS_FILE="pass_params.jl"

echo NPROCS=$SLURM_NPROCS > $PASS_FILE
echo JOB_NAME=\"$SLURM_JOB_NAME\" >> $PASS_FILE

#Operational parameters defining the problem

echo Ha = 1000.0 >> $PASS_FILE
echo Re = 1.0 >> $PASS_FILE
echo cw = 0.0 >> $PASS_FILE
echo τ = 1e6 >> $PASS_FILE

#Parameters for the mesh construction

echo R = 1.0 >> $PASS_FILE #Radius of the pipe (used for normalization)
echo p = 0.9 >> $PASS_FILE #Edge of the BL region
echo q = 0.5 >> $PASS_FILE #Core radius
echo L = 4.0 >> $PASS_FILE #Length of the pipe

echo N_r = 16 >> $PASS_FILE #Radial nodes in the BL region
echo n = 4 >> $PASS_FILE #Radial nodes in the Hartmann BL (1/Ha)
echo N_a = 60 >> $PASS_FILE #Azimuthal nodes
echo N_L = 20 >> $PASS_FILE #Nodes in the axial direction
echo n_c = 0.18 >> $PASS_FILE #Maximum size of the cell edges in the core region (for hybrid only)

#Generate the mesh

#For a hybrid (block BL and unstructured core) mesh
export mesh_path=/ws/blankets/GridapMHD.jl/meshes/tube_hybrid

#For a block mesh (but still tetra)
#export mesh_path=/ws/blankets/GridapMHD.jl/meshes/tube_block

source $mesh_path/MeshGenerator.sh "$mesh_path/../tube_computed.msh"

#GridapMHD computation
mpiexec -n ${SLURM_NPROCS} --mca btl_openib_if_include mlx5_0 julia --project=$GRIDAPMHD -O3 --check-bounds=no -e\
'
include("pass_params.jl")
using GridapPETSc
using SparseMatricesCSR
using GridapMHD: tube
#Monolithic MUMPS
solver = Dict(
:solver => :petsc,
:matrix_type => SparseMatrixCSR{0,PetscScalar,PetscInt},
:vector_type => Vector{PetscScalar},
:petsc_options => "-snes_monitor -ksp_error_if_not_converged true -ksp_converged_reason -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -mat_mumps_icntl_28 1 -mat_mumps_icntl_29 2 -mat_mumps_icntl_4 3 -mat_mumps_cntl_1 0.001",
:niter => 100,
:rtol => 1e-5,
)
tube(;
mesh="computed",
np=NPROCS,
backend=:mpi,
Ha = Ha,
N = Ha^2/Re,
inlet = :plane,
cw = cw,
τ = τ,
debug=false,
vtk=true,
title=JOB_NAME,
solver=solver
# petsc_options="-snes_monitor -ksp_error_if_not_converged true -ksp_converged_reason -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -mat_mumps_icntl_7 0 -mat_mumps_icntl_28 1 -mat_mumps_icntl_29 2 -mat_mumps_icntl_4 3 -mat_mumps_cntl_1 0.001"
)'

duration=$SECONDS
rm -f hosts.$SLURM_JOB_ID
rm -f pass_params.jl

STATUS=$?
echo "================================================================"
echo "$(($duration / 60)) minutes and $(($duration % 60)) seconds elapsed."
echo "================================================================"
echo ""
echo "STATUS = $STATUS"
echo ""