Integrate particle source #84

Closed
odstrcilt wants to merge 43 commits
Changes from 32 commits
Commits (43)
83a4fd6
Use the same function for 1D and 2D source
odstrcilt Jun 20, 2023
2b0f45b
Update Integrate source over simulation timestep
odstrcilt Jun 20, 2023
8489de4
Support overlapping lbo injections
odstrcilt Jul 9, 2023
5493032
Issue if the puff step started at t=0
odstrcilt Jul 9, 2023
61164d1
Extrapolate source by zero
odstrcilt Jul 10, 2023
a4ebb89
Divertor source will be in #/s units
odstrcilt Jul 10, 2023
b08edb6
Load trust_SOL_Ti parameter from namelist
odstrcilt Jul 17, 2023
9454155
Update source_utils.py
odstrcilt Jul 27, 2023
ba96e26
Try to remove numpy
odstrcilt Jul 27, 2023
660c410
Don't allow CX calculation without CCD file
odstrcilt Aug 1, 2023
1aa4524
Raise error if MDS data does not exist
odstrcilt Aug 17, 2023
2f6fcc6
bug
odstrcilt Sep 15, 2023
6d94e8d
Update tests.yml
odstrcilt Nov 29, 2023
fad4e61
Update python-publish.yml
odstrcilt Nov 29, 2023
7c79ba3
Update requirements.txt
odstrcilt Nov 29, 2023
4d65ea2
Update requirements.txt
odstrcilt Nov 29, 2023
efd62d3
Update requirements.txt
odstrcilt Nov 29, 2023
a8cdbc7
Update python-publish.yml
odstrcilt Nov 29, 2023
acd33c7
Update python-publish.yml
odstrcilt Nov 29, 2023
a06324d
Update python-publish.yml
odstrcilt Nov 29, 2023
611c97f
reverse some changes
odstrcilt Nov 29, 2023
a2f797f
Update tests.yml
odstrcilt Nov 29, 2023
a493f68
Update tests.yml
odstrcilt Nov 29, 2023
7420d65
Fix os.sep issue with url
odstrcilt Nov 29, 2023
360ec27
Issue with near axis interpolation
odstrcilt Nov 29, 2023
87faa1e
wrong units
odstrcilt Nov 29, 2023
c17e33b
Changes for steady state solver
odstrcilt Nov 29, 2023
57606cc
fix bug
odstrcilt Nov 29, 2023
8d5a0f0
Update source_utils.py
odstrcilt Nov 29, 2023
bfb775f
more robust loading of ADF files
odstrcilt Nov 29, 2023
40b1afe
Update python-publish.yml
odstrcilt Nov 29, 2023
c36b67b
Update tests.yml
odstrcilt Nov 29, 2023
1678ff6
Update adas_files.py
odstrcilt Dec 14, 2023
31c321b
Update tests.yml
odstrcilt Dec 14, 2023
04d73a3
Update adas_files.py
odstrcilt Jan 29, 2024
690e502
Add titan files
odstrcilt Jul 19, 2024
be679dc
merge with master
odstrcilt Aug 1, 2024
2f03a12
Update python-publish.yml
odstrcilt Aug 1, 2024
4b020fd
Merge remote-tracking branch 'origin/master' into integrated_source
odstrcilt Aug 1, 2024
e863f1f
Merge branch 'integrated_source' of https://github.com/fsciortino/Aur…
odstrcilt Aug 1, 2024
7e47f86
Merge branch 'titan' into integrated_source
odstrcilt Aug 1, 2024
4556d0a
Update python-publish.yml
odstrcilt Aug 1, 2024
feb9186
Update pyproject.toml
odstrcilt Aug 1, 2024
6 changes: 4 additions & 2 deletions .github/workflows/python-publish.yml
@@ -15,11 +15,13 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: '3.x'
python-version: '3.9'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install setuptools wheel numpy
pip install setuptools wheel
pip install --upgrade setuptools
python -m pip install -r requirements.txt
- name: Build the package
run: python setup.py sdist
- name: Publish distribution 📦 to Test PyPI
7 changes: 5 additions & 2 deletions .github/workflows/tests.yml
@@ -25,9 +25,12 @@ jobs:
uses: actions/setup-python@v2
with:
python-version: '3.9'
- name: Update pip
run: pip install --upgrade pip
- name: Install dependencies
run: |
python -m pip install -r requirements.txt
run: python -m pip install -r requirements.txt
- name: Check numpy import
run: python -c "import numpy; print(numpy.__version__)"
- name: Install Aurora
run: ${{ matrix.build_command }}
- name: Move to root and try importing Aurora
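The new CI steps above upgrade pip, install the pinned requirements, and explicitly verify the numpy import before installing and importing Aurora. An equivalent local smoke test, assuming Aurora and its requirements are already installed in the active environment:

```python
# Local equivalent of the CI smoke test above (assumes Aurora and its
# requirements are installed in the current environment).
import numpy
import aurora

print("numpy", numpy.__version__)
print("aurora imported from", aurora.__file__)
```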
11 changes: 6 additions & 5 deletions aurora/adas_files.py
@@ -32,7 +32,7 @@
)


def get_adas_file_loc(filename, filetype="adf11"):
def get_adas_file_loc(filename, filetype="adf11", tmp_folder=None):
Owner commented:
@odstrcilt why do you need this tmp_folder here?

odstrcilt (Collaborator, Author) replied:
I have no idea. I will fix it.

"""Find location of requested atomic data file for the indicated ion.
Accepts all ADAS "derived" data file types
(adf11, adf12, adf13, adf15, adf21, adf22).
@@ -75,12 +75,12 @@ def fetch_file(filename, filetype, loc):

url = "https://open.adas.ac.uk/download/" + filetype + '/'
if filetype == "adf11":
filename_mod = filename.split("_")[0] + os.sep + filename
filename_mod = filename.split("_")[0] + '/' + filename

elif filetype in ["adf12", "adf13", "adf21", "adf22"]:
filename_mod = (
filename.replace("#", "][").split("_")[0]
+ os.sep
+ '/'
+ filename.replace("#", "][")
)

@@ -99,15 +99,15 @@ def fetch_file(filename, filetype, loc):
else:
# patterns may be different, attempt simple guess:
filename_mod = (
filename.split("_")[0] + os.sep + filename.replace("#", "][")
filename.split("_")[0] + '/' + filename.replace("#", "][")
)
else:
raise ValueError(
"ADAS file type/format not recognized.\n"
+ "Could not find it or download it automatically!"
)

r = requests.get(url + os.sep + filename_mod)
r = requests.get(url + '/' + filename_mod)


if len(r.text) < 1000:
@@ -180,6 +180,7 @@ def adas_files_dict():
files["H"]["brs"] = "brs05360.dat"
files["H"]["pbs"] = "pbsx7_h.dat"
files["H"]["prc"] = "prc89_h.dat"
files["D"] = files["H"]
files["He"] = {} # 2
files["He"]["acd"] = "acd96_he.dat"
files["He"]["scd"] = "scd96_he.dat"
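The os.sep-to-'/' replacements above matter because os.sep is '\\' on Windows, which produces invalid OpenADAS download URLs. A minimal sketch of the same URL construction with explicit '/' separators (a hypothetical helper, not Aurora's API):

```python
# Minimal sketch, not Aurora's API: build an OpenADAS adf11 download URL with
# explicit '/' separators so the result is valid on every platform.
from urllib.parse import urljoin

def openadas_adf11_url(filename: str) -> str:
    base = "https://open.adas.ac.uk/download/adf11/"
    # e.g. "scd96_he.dat" lives under the "scd96" subdirectory
    return urljoin(base, filename.split("_")[0] + "/" + filename)

print(openadas_adf11_url("scd96_he.dat"))
# https://open.adas.ac.uk/download/adf11/scd96/scd96_he.dat
```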
2 changes: 1 addition & 1 deletion aurora/atomic.py
@@ -142,7 +142,7 @@ def load(self):
self.MPRT.append(int(iprt.split("=")[1]))
except:
# some old files have different header
self.Z.append(ind + 1)
self.Z.append(ind)
self.MGRD.append(1)
self.MPRT.append(1)

2 changes: 1 addition & 1 deletion aurora/coords.py
@@ -245,7 +245,7 @@ def rad_coord_transform(x, name_in, name_out, geqdsk):
# Interpolate to transform coordinates
if name_in == "rhon":
coord_in = rhon_ref
if name_in == "psin":
elif name_in == "psin":
coord_in = psin_ref
elif name_in == "rhop":
coord_in = rhop_ref
66 changes: 41 additions & 25 deletions aurora/core.py
@@ -1,5 +1,5 @@
"""This module includes the core class to set up simulations with :py:mod:`aurora`. The :py:class:`~aurora.core.aurora_sim` takes as input a namelist dictionary and a g-file dictionary (and possibly other optional argument) and allows creation of grids, interpolation of atomic rates and other steps before running the forward model.
"""
"""
# MIT License
#
# Copyright (c) 2021 Francesco Sciortino
@@ -66,7 +66,7 @@ def __init__(self, namelist, geqdsk=None):
# make sure that any changes in namelist will not propagate back to the calling function
self.namelist = deepcopy(namelist)
self.geqdsk = geqdsk # if None, minor (rvol_lcfs) and major radius (Raxis_cm) must be in namelist
self.kin_profs = self.namelist["kin_profs"]

self.imp = namelist["imp"]

# import here to avoid issues when building docs or package
@@ -107,7 +107,10 @@ def __init__(self, namelist, geqdsk=None):
atom_files["ccd"] = self.namelist.get(
"ccd", adas_files.adas_files_dict()[self.imp]["ccd"]
)

if atom_files["ccd"] is None:
raise Exception('Missing CCD ADF11 file!')


if self.namelist.get("metastable_flag", False):
atom_files["qcd"] = self.namelist.get(
"qcd", adas_files.adas_files_dict()[self.imp].get("qcd", None)
@@ -117,16 +120,20 @@
)

# now load ionization and recombination rates

self.atom_data = atomic.get_atom_data(self.imp, files=atom_files)

# allow for ion superstaging
self.superstages = self.namelist.get("superstages", [])

# set up radial and temporal grids
self.setup_grids()

# set up kinetic profiles and atomic rates
self.setup_kin_profs_depts()

# interpolate kinetic profiles if the profiles are in the namelist
if "kin_profs" in self.namelist:
self.kin_profs = self.namelist["kin_profs"]
# set up kinetic profiles and atomic rates
self.setup_kin_profs_depts()

def reload_namelist(self, namelist=None):
"""(Re-)load namelist to update scalar variables."""
@@ -248,26 +255,23 @@ def setup_kin_profs_depts(self):
if len(save_time) == 1: # if time averaged profiles were used
Sne0 = Sne0[:, [0]] # 0th charge state (neutral)


# get time history and radial profiles separately
source_time_history = source_utils.get_source_time_history(
self.namelist, self.Raxis_cm, self.time_grid
) # units of particles/s/cm for 1D source or particles/s/cm^3 for 2D source


if self.namelist["source_type"] == "arbitrary_2d_source":
# interpolate explicit source values on time and rhop grids of simulation
# NB: explicit_source_vals should be in units of particles/s/cm^3 <-- ionization rate
srho = self.namelist["explicit_source_rhop"]
stime = self.namelist["explicit_source_time"]
source = np.array(self.namelist["explicit_source_vals"]).T

spl = RectBivariateSpline(srho, stime, source, kx=1, ky=1)
# extrapolate by the nearest values
self.source_rad_prof = spl(
np.clip(self.rhop_grid, min(srho), max(srho)),
np.clip(self.time_grid, min(stime), max(stime)),
)
self.source_rad_prof = interp1d(srho, source_time_history.T,
axis=0,bounds_error=False,
fill_value=0)(self.rhop_grid)
# Change units to particles/cm^3
self.src_core = self.source_rad_prof / Sne0
else:
# get time history and radial profiles separately
source_time_history = source_utils.get_source_time_history(
self.namelist, self.Raxis_cm, self.time_grid
) # units of particles/s/cm

# get radial profile of source function for each time step
# dimensionless, normalized such that pnorm=1
@@ -284,7 +288,6 @@
self.src_core = self.source_rad_prof * source_time_history[None, :]

self.src_core = np.asfortranarray(self.src_core)

# if wall_recycling>=0, return flows from the divertor are enabled
if (
self.wall_recycling >= 0
@@ -479,10 +482,19 @@ def set_time_dept_atomic_rates(self, superstages=[], metastables=False):

# cache radiative & dielectronic recomb:
self.alpha_RDR_rates = Rne

if self.namelist["cxr_flag"]:
# Get an effective recombination rate by summing radiative & CX recombination rates
self.alpha_CX_rates = out.pop(0) * (self._n0 / self._ne)[:, None]
alpha_CX_rates = out.pop(0) * (self._n0 / self._ne)[:, None]
# metastable-resolved CCD files exist only for C and N; use the unresolved file for others
if metastables and alpha_CX_rates.shape[1] != Rne.shape[1]:
self.alpha_CX_rates = np.zeros_like(Rne)
for i, m in enumerate(self.atom_data["ccd"].meta_ind[1:]):
j = self.atom_data["acd"].meta_ind.index(m)
self.alpha_CX_rates[:,j] = alpha_CX_rates[:,i]
else:
self.alpha_CX_rates = alpha_CX_rates

Rne = (
Rne + self.alpha_CX_rates
) # inplace addition would change also self.alpha_RDR_rates
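The block above pads the unresolved charge-exchange rates into a metastable-resolved array by matching metastable indices between the CCD and ACD data. A standalone sketch of that column remapping, with made-up index lists and array shapes (the meta_ind semantics are assumed, not Aurora's exact internals):

```python
# Sketch with invented metastable index lists and shapes: place each column of
# an unresolved CX rate array at the matching column of the metastable-resolved
# recombination rate array, leaving unmatched columns at zero.
import numpy as np

acd_meta = [(1, 1), (2, 1), (2, 2), (3, 1)]    # columns of the resolved ACD data
ccd_meta = [(1, 1), (2, 1), (3, 1)]            # columns of the unresolved CCD data
nt, nr = 4, 10                                 # (time, radius) sample sizes

Rne = np.random.rand(nt, len(acd_meta), nr)            # radiative + dielectronic recomb.
alpha_CX = np.random.rand(nt, len(ccd_meta) - 1, nr)   # CX rates (fewer columns)

alpha_CX_full = np.zeros_like(Rne)
for i, m in enumerate(ccd_meta[1:]):
    j = acd_meta.index(m)                      # matching column in the resolved data
    alpha_CX_full[:, j] = alpha_CX[:, i]

Rne_eff = Rne + alpha_CX_full                  # effective recombination incl. CX
```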
@@ -491,13 +503,17 @@
# include charge exchange between NBI neutrals and impurities
self.nbi_cxr = interp1d(
self.namelist["nbi_cxr"]["rhop"],
self.namelist["nbi_cxr"]["vals"],
axis=0,
np.atleast_3d(self.namelist["nbi_cxr"]["vals"]).T,
bounds_error=False,
fill_value=0.0,
)(self.rhop_grid)

if self.nbi_cxr.shape[2] > 1:
# time-dependent: times are beam switch times, values are means between switch times
it = self.namelist["nbi_cxr"]["times"].searchsorted(self.time_grid)
self.nbi_cxr = self.nbi_cxr[it]

Rne = Rne + self.nbi_cxr.T[None, :, :]
Rne = Rne + self.nbi_cxr

if len(superstages):
self.superstages, Rne, Sne, self.fz_upstage = atomic.superstage_rates(
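One of the changes above replaces the bivariate spline for the "arbitrary_2d_source" case with a 1-D interpolation over rhop that fills with zero outside the given radial range (matching the "Extrapolate source by zero" commit). A standalone sketch of that pattern, with invented grids and units as noted:

```python
# Sketch only (invented grids): interpolate an explicit source given on a
# (time, rhop) grid onto the simulation radial grid, extrapolating by zero
# beyond the last provided rhop point instead of raising an error.
import numpy as np
from scipy.interpolate import interp1d

srho = np.linspace(0.0, 1.1, 20)                 # rhop grid of the input source
stime = np.linspace(0.0, 0.1, 5)                 # time grid of the input source
source = np.random.rand(len(stime), len(srho))   # particles/s/cm^3

rhop_grid = np.linspace(0.0, 1.3, 60)            # simulation radial grid

source_on_grid = interp1d(srho, source, axis=1,
                          bounds_error=False, fill_value=0.0)(rhop_grid)
print(source_on_grid.shape)                      # (5, 60); zero wherever rhop > 1.1
```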
6 changes: 3 additions & 3 deletions aurora/grids_utils.py
@@ -732,14 +732,15 @@ def estimate_boundary_distance(shot, device, time_ms):
shot=shot,
TDI="\\EFIT01::TOP.RESULTS.A_EQDSK.ORIGHT",
) # CMOD format, take ORIGHT
assert tmp.check()
except Exception:
tmp = OMFITmdsValue(
server=device,
treename="EFIT01",
shot=shot,
TDI="\\EFIT01::TOP.RESULTS.AEQDSK.GAPOUT",
) # useful variable for many other devices

time_vec = tmp.dim_of(0)
data_r = tmp.data()

@@ -749,8 +750,7 @@

# take separation to limiter to be 2/3 of the separation to the wall boundary
lim_sep = round(bound_sep * 2.0 / 3.0, 3)

return bound_sep, lim_sep
return bound_sep*100, lim_sep*100


def vol_int(var, rvol_grid, pro_grid, Raxis_cm, rvol_max=None):
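The changes above add `assert tmp.check()` so that a C-Mod-style node that exists but holds no data falls through to the GAPOUT fallback, and they scale the returned separations by 100, presumably converting EFIT's meters to the centimeters used elsewhere in Aurora. A sketch of the fallback pattern, using only calls already shown in the diff (the import path is an assumption):

```python
# Sketch of the try/check/except fallback above; the import path is an
# assumption (OMFIT's omfit_classes package), the TDI nodes are as in the diff.
from omfit_classes.omfit_mds import OMFITmdsValue

def read_outer_gap(device, shot):
    try:
        tmp = OMFITmdsValue(server=device, treename="EFIT01", shot=shot,
                            TDI="\\EFIT01::TOP.RESULTS.A_EQDSK.ORIGHT")
        assert tmp.check()   # guard against nodes that exist but hold no data
    except Exception:
        tmp = OMFITmdsValue(server=device, treename="EFIT01", shot=shot,
                            TDI="\\EFIT01::TOP.RESULTS.AEQDSK.GAPOUT")
    return tmp.dim_of(0), tmp.data()
```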
7 changes: 4 additions & 3 deletions aurora/interp.py
@@ -109,10 +109,11 @@ def interp_quad(x, y, d, rLCFS, r):
"""Function 'interp' used for kinetic profiles."""
f = interp1d(x, np.log(y), kind="quadratic", assume_sorted=True, copy=False)
idx = np.searchsorted(r, rLCFS)
core = np.exp(f(np.clip(r[:idx] / rLCFS, 0, x[-1])))
core = np.exp(f(np.clip(r[:idx] / rLCFS, x[0], x[-1])))
edge = core[..., [idx - 1]] * np.exp(
-np.outer(1.0 / np.asarray(d), r[idx:] - r[idx - 1])
)


return np.concatenate([core, edge], axis=-1)

@@ -122,12 +123,12 @@ def interpa_quad(x, y, rLCFS, r):
f = interp1d(
x,
np.log(y),
bounds_error=False,
kind="quadratic",
assume_sorted=True,
copy=False,
)
return np.exp(f(np.minimum(r / rLCFS, x[-1])))

return np.exp(f(np.clip(r / rLCFS, x[0], x[-1])))


def interp(x, y, rLCFS, r):
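Both hunks above clip the evaluation points to [x[0], x[-1]] rather than allowing values below the first knot, which avoids quadratic extrapolation of log(y) near the magnetic axis (the "Issue with near axis interpolation" commit). A minimal sketch of the clipped log-quadratic interpolation, with invented profile data:

```python
# Minimal sketch (invented profile): interpolate log(y) quadratically and clip
# the query points to the knot range so no extrapolation happens near the axis.
import numpy as np
from scipy.interpolate import interp1d

x = np.linspace(0.05, 1.0, 30)            # normalized radius of the input profile
y = 3e13 * np.exp(-3 * x**2)              # a density-like profile
r_over_rlcfs = np.linspace(0.0, 1.0, 100)

f = interp1d(x, np.log(y), kind="quadratic", assume_sorted=True)
vals = np.exp(f(np.clip(r_over_rlcfs, x[0], x[-1])))   # constant below x[0]
```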
7 changes: 6 additions & 1 deletion aurora/radiation.py
@@ -670,7 +670,12 @@ def read_adf15(path, order=1):
# Get the expected number of lines by reading the header:
num_lines = int(header[0])
spec = header[1].strip("/")
Z = int(''.join(header[2:-3]).strip(':'))

# header formatting is not strictly defined and this can sometimes fail
try:
Z = int(''.join(header[2:-3]).strip(':'))
except:
Z = 0

log10pec_dict = {}
for i in range(num_lines):
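The new try/except above makes read_adf15 tolerant of ADF15 headers that do not follow the usual token layout, falling back to Z = 0 instead of crashing. A toy sketch of that parse with hypothetical header token lists:

```python
# Toy sketch (hypothetical header tokens): parse the nuclear charge from a
# loosely formatted ADF15 header, falling back to 0 when the layout differs.
def parse_adf15_charge(header_tokens):
    try:
        return int(''.join(header_tokens[2:-3]).strip(':'))
    except (ValueError, IndexError):
        # header formatting is not strictly defined; give up gracefully
        return 0

print(parse_adf15_charge(['15', 'NE', '+', '8:', 'PHOTON', 'EMISSIVITY', 'COEFFTS']))  # 8
print(parse_adf15_charge(['15', 'NE', 'unexpected', 'layout']))                        # 0
```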