diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
new file mode 100644
index 0000000..fe8ad2d
--- /dev/null
+++ b/.gitlab-ci.yml
@@ -0,0 +1,7 @@
+before_script:
+ - pip install -r reqs.txt
+
+run-test:
+ script:
+ - python setup.py install
+ - pytest ./tests/unit_test.py
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..7163688
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,11 @@
+Copyright 2020 Jin Whan Bae
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
\ No newline at end of file
diff --git a/cyclus_gui/gui/arche_window.py b/cyclus_gui/gui/arche_window.py
index f01d28e..d3f0374 100644
--- a/cyclus_gui/gui/arche_window.py
+++ b/cyclus_gui/gui/arche_window.py
@@ -50,10 +50,8 @@ def __init__(self, master, output_path):
messagebox.showinfo('Found', 'Found Cyclus, automatically grabbing archetype libraries :)')
except:
try:
- # try to download m.json from gitlab
- url = 'https://code.ornl.gov/4ib/cyclus_gui/raw/master/src/m.json'
- urllib.request.urlretrieve(url, self.meta_file_path)
- self.arche = self.read_metafile(self.meta_file_path)
+ # try to download m.json from gitlab
+ self.arche = self.get_metafile_from_git(self.meta_file_path)
messagebox.showinfo('Downloaded', 'Downloaded metadata from https://code.ornl.gov/4ib/cyclus_gui/\nIt seems like you do not have Cyclus.\n So I filled this for you :)')
except:
messagebox.showinfo('No Internet', 'No internet, so we are going to use metadata saved in the package.\n Using all cyclus/cycamore arcehtypes as default.')
@@ -79,6 +77,51 @@ def __init__(self, master, output_path):
# status window
self.update_loaded_modules_window()
+ self.check_duplicate()
+
+
+ def get_metafile_from_git(self, meta_file_path):
+ url = 'https://code.ornl.gov/4ib/cyclus_gui/raw/master/src/m.json'
+ urllib.request.urlretrieve(url, meta_file_path)
+ arche = self.read_metafile(meta_file_path)
+ return arche
+
+
+ def check_duplicate(self):
+ prev = []
+ duplicate = []
+ for a in self.arche:
+ if a[1] not in prev:
+ prev.append(a[1])
+ else:
+ duplicate.append(a[1])
+ if duplicate:
+ self.duplicates = True
+ self.duplicate_window_dict = {}
+ for i in duplicate:
+ self.choose_between_duplicate(i)
+
+
+
+ def choose_between_duplicate(self, duplicate_name):
+ self.duplicate_window_dict[duplicate_name] = Toplevel(self.master)
+ self.duplicate_window_dict[duplicate_name].title('Choice!')
+ Label(self.duplicate_window_dict[duplicate_name], text='Duplicate archetype name. Pick one to keep:').pack()
+ j = [i for i in self.arche if i[1] == duplicate_name]
+ for i in j:
+            Button(self.duplicate_window_dict[duplicate_name], text='%s:%s' %(i[0],i[1]), command=lambda i=i, j=j: self.delete_all_but(i, j)).pack()
+
+
+ def delete_all_but(self, chosen, all_):
+ for i in all_:
+ if i != chosen:
+ self.delete_arche(i)
+        self.duplicate_window_dict[chosen[1]].destroy()
+ self.duplicates = False
+ self.update_loaded_modules_window()
+
+
+
def import_libraries(self, local):
@@ -270,12 +313,19 @@ def add(self):
def done(self):
+
+ self.check_duplicate()
+ if self.duplicates:
+ messagebox.showerror('Check Duplicates', 'See if you have any duplicate archetype names!\nThere should be a window that tells you to choose one.')
+ return
+
string = '\n'
for pair in self.arche:
string += '\t\t%s\t%s\n' %(pair[0], pair[1])
string += '\n'
with open(os.path.join(self.output_path, 'archetypes.xml'), 'w') as f:
f.write(string)
+ messagebox.showinfo('Success', 'Successfully created archetype file!')
self.master.destroy()
diff --git a/cyclus_gui/gui/backend_window.py b/cyclus_gui/gui/backend_window.py
index fe2715e..f2756fe 100644
--- a/cyclus_gui/gui/backend_window.py
+++ b/cyclus_gui/gui/backend_window.py
@@ -2,7 +2,6 @@
from tkinter import messagebox
from tkinter import filedialog
from tkinter.scrolledtext import ScrolledText
-import xmltodict
import uuid
import os
import shutil
@@ -19,7 +18,7 @@
class BackendWindow(Frame):
- def __init__(self, master, output_path):
+ def __init__(self, master, output_path, filename='cyclus.sqlite'):
"""
does backend analysis
"""
@@ -31,7 +30,7 @@ def __init__(self, master, output_path):
self.output_path = output_path
self.master.geometry('+0+%s' %int(self.screen_height/4))
self.configure_window()
- self.get_cursor()
+ self.get_cursor(filename)
self.get_id_proto_dict()
self.get_start_times()
@@ -129,8 +128,8 @@ def get_id_proto_dict(self):
self.id_proto_dict[agent['agentid']] = agent['prototype']
- def get_cursor(self):
- con = lite.connect(os.path.join(self.output_path, 'cyclus.sqlite'))
+ def get_cursor(self, filename='cyclus.sqlite'):
+ con = lite.connect(os.path.join(self.output_path, filename))
con.row_factory = lite.Row
self.cur = con.cursor()
@@ -657,7 +656,7 @@ def plot_flow(self):
maxy = 14 * len(flow_clean)
y_coords = np.linspace(0, maxx, len(flow_clean))[::-1]
x_coords = np.linspace(0, maxy, max([len(q) for q in flow_clean]))
- xgap = x_coords[1] - x_coords[0]
+ # xgap = x_coords[1] - x_coords[0]
ygap = y_coords[1] - y_coords[0]
uniq_commods = list(set(df['Commodity']))
diff --git a/cyclus_gui/gui/gui.py b/cyclus_gui/gui/gui.py
index 9d103aa..55b49b7 100644
--- a/cyclus_gui/gui/gui.py
+++ b/cyclus_gui/gui/gui.py
@@ -31,10 +31,10 @@
os_ = platform.system()
print('Your OS is:', os_)
-if 'windows' in os_.lower():
- windows=True
+if 'windows' in os_.lower() or 'linux' in os_.lower():
+ no_hover=True
else:
- windows=False
+ no_hover=False
uniq_id = str(uuid.uuid4())[:3]
@@ -135,7 +135,7 @@ def init_window(self):
combine_run_button = Button(root, text='Combine and Run', command= lambda: self.check_and_run())
backend_button = Button(root, text='Backend Analysis', command= lambda: self.open_window('backend', output_path))
- if not windows:
+ if not no_hover:
CreateToolTip(saveas_button, text='You can save your current instance with a different three-letter hash.')
CreateToolTip(load_button, text='You can load from a previous instance.\nFor every instance, the GUI automatically creates `output_xxx` directory\nwhere it saves all the files, so that it can be called later on.')
CreateToolTip(load_complete_input, text='You can load from a previously-existing Cyclus input xml file.\nThere are limitations to some input files, if they use special archetypes. You can edit or run cyclus on the file!')
@@ -213,38 +213,40 @@ def load_prev_window(self):
self.initialized['prev'] = True
self.load_window = Toplevel(self.master)
self.load_window.title('Load previous with hash')
- Label(self.load_window, text='Enter id:', bg='yellow').pack()
- entry = Entry(self.load_window)
- entry.pack()
- Button(self.load_window, text='Load!', command=lambda: self.load_prev(entry)).pack()
-
-
- def load_prev(self, entry):
folders = os.listdir(file_path)
folders = [f for f in folders if os.path.isdir(os.path.join(file_path, f))]
- hash_ = str(entry.get())
- for i in folders:
- if hash_ in i:
- files_in = os.listdir(os.path.join(file_path, 'output_%s'%hash_))
- info_text = 'Found folder %s.\nLoading input blocks:\n\n' %i
- for f_ in files_in:
- f_ = f_.replace('.xml', '')
- info_text += '\t%s\n' %f_
- messagebox.showinfo('Found!', info_text)
- global uniq_id
- global output_path
- uniq_id = hash_
- self.hash_var.set(hash_)
- print('Changed ID to %s' %hash_)
- output_path = os.path.join(file_path, i)
- self.load_window.destroy()
- shutil.rmtree('output_%s' %self.uniq_id)
- self.uniq_id = hash_
- self.initialized['prev'] = False
- return
- # if folder is not found,
- messagebox.showerror('Error', 'No folder with that name.\n The folder must exist in: \n %s' %file_path)
+ folders = [f for f in folders if 'output_' in f]
+ hashs = [f.replace('output_', '') for f in folders]
+ hashs = sorted([f for f in hashs if f != self.uniq_id])
+ Label(self.load_window, text='Current working directory:').pack()
+ Label(self.load_window, text=os.path.abspath(file_path), bg='yellow').pack()
+ Label(self.load_window, text='Available instances:').pack()
+ for h in hashs:
+            Button(self.load_window, text=h, command=lambda h=h: self.load_prev(h)).pack()
+ if not hashs:
+ # if list is empty:
+ Label(self.load_window, text='NONE', bg='red').pack()
+
+
+ def load_prev(self, h):
+ files_in = os.listdir(os.path.join(file_path, 'output_%s'%h))
+ info_text = 'Found folder output_%s.\nLoading input blocks:\n\n' %h
+ for f_ in files_in:
+ f_ = f_.replace('.xml', '')
+ info_text += '\t%s\n' %f_
+ messagebox.showinfo('Found!', info_text)
+ global uniq_id
+ global output_path
+ uniq_id = h
+ self.hash_var.set(h)
+ print('Changed ID to %s' %h)
+ output_path = os.path.join(file_path, 'output_%s' %h)
+ self.load_window.destroy()
+ shutil.rmtree('output_%s' %self.uniq_id)
+ self.uniq_id = h
self.initialized['prev'] = False
+ return
+
def askopenfile(self):
file = filedialog.askopenfile(parent=self.master, mode='r', title='Choose an xml file')
@@ -273,24 +275,24 @@ def load_xml_file(self, file):
def load_from_pris(self):
guide_text = """
- You can `initialize' your simulation as a real-life nation!
- This method loads from the PRIS database and deploys reactors in your
- desired country, in a desired initial time. The reactor lifetimes
- are calculated as a remaining lifetime.
-
- Assumptions:
- 1. Timestep is assumed to be a month
- 2. Reactors below 100 MWe are filtered out (assumed to be research reactors)
- 3. Core size is linearly scaled with power capacity
- 4. Reactor lifetimes are all assumed to be 60 years from their first criticality date
- 5. Fuel Cycle facilities are deployed with infinite capacity.
-
- Simulation defaults:
- 1. Reactors are cycamore::Reactor (recipe reactors)
- 2. By default deploys a `RandLand' region with `Fuel_Cycle_Facilities' institution with facilities:
- a. `nat_u_source' -> [natl_u]
- b. [natl_u] -> `enrichment' -> [uox]
- d. [uox_waste, used_candu, mox_waste, tailings, reprocess_waste] -> `SomeSink'
+You can `initialize' your simulation as a real-life nation!
+This method loads from the PRIS database and deploys reactors in your
+desired country, in a desired initial time. The reactor lifetimes
+are calculated as a remaining lifetime.
+
+Assumptions:
+1. Timestep is assumed to be a month
+2. Reactors below 100 MWe are filtered out (assumed to be research reactors)
+3. Core size is linearly scaled with power capacity
+4. Reactor lifetimes are all assumed to be 60 years from their first criticality date
+5. Fuel Cycle facilities are deployed with infinite capacity.
+
+Simulation defaults:
+1. Reactors are cycamore::Reactor (recipe reactors)
+2. By default deploys a `RandLand' region with `Fuel_Cycle_Facilities' institution with facilities:
+ a. `nat_u_source' -> [natl_u]
+ b. [natl_u] -> `enrichment' -> [uox]
+ d. [uox_waste, used_candu, mox_waste, tailings, reprocess_waste] -> `SomeSink'
"""
self.guide(guide_text)
try:
diff --git a/cyclus_gui/gui/proto_window.py b/cyclus_gui/gui/proto_window.py
index 0a15d33..d3d9825 100644
--- a/cyclus_gui/gui/proto_window.py
+++ b/cyclus_gui/gui/proto_window.py
@@ -288,7 +288,7 @@ def submit(self):
facility_dict['name'] = name
facility_dict['config'] = config['config']
new_dict['root']['facility'].append(facility_dict)
- f.write(xmltodict.unparse(new_dict, pretty=True))
+ f.write('\n'.join(xmltodict.unparse(new_dict, pretty=True).split('\n')[1:]))
messagebox.showinfo('Sucess', 'Successfully rendered %i facility prototypes!' %len(new_dict['root']['facility']))
self.master.destroy()
self
diff --git a/cyclus_gui/gui/sim_window.py b/cyclus_gui/gui/sim_window.py
index 81e0d29..b7b6fad 100644
--- a/cyclus_gui/gui/sim_window.py
+++ b/cyclus_gui/gui/sim_window.py
@@ -67,7 +67,6 @@ def on_leave(self, event):
self.new_window.destroy()
-
def is_it_pos_integer(self, num):
if float(num) % 1.0 != 0.0:
return False
@@ -84,25 +83,25 @@ def read_xml(self):
def done(self):
- self.entry_dict = {key: val.get() for key, val in self.entry_dict.items()}
-
+ val_dict = {key:val.get() for key,val in self.entry_dict.items()}
# check input:
- if '' in self.entry_dict.values():
+ if '' in val_dict.values():
messagebox.showerror('Error', 'You omitted some parameters')
- elif not self.is_it_pos_integer(self.entry_dict['startmonth']):
+ elif not self.is_it_pos_integer(val_dict['startmonth']):
messagebox.showeeror('Error', 'Start Month must be a positive integer')
- elif not self.is_it_pos_integer(self.entry_dict['startyear']):
+ elif not self.is_it_pos_integer(val_dict['startyear']):
messagebox.showerror('Error', 'Start Year must be a positive integer')
- elif int(self.entry_dict['startmonth']) not in list(range(1,13)):
- messagebox.showeror('Error', 'Month has to be number from 1 to 12')
- elif self.entry_dict['decay'] not in ['never', 'lazy', 'manual']:
+ elif int(val_dict['startmonth']) not in list(range(1,13)):
+ messagebox.showerror('Error', 'Month has to be number from 1 to 12')
+ return
+ elif val_dict['decay'] not in ['never', 'lazy', 'manual']:
messagebox.showerror('Error', 'Decay must be either never, lazy, or manual')
- elif not self.is_it_pos_integer(self.entry_dict['dt']):
+ elif not self.is_it_pos_integer(val_dict['dt']):
messagebox.showerror('Error', 'dt must be a positive integer')
else:
messagebox.showinfo('Success', 'Rendered Simulation definition into xml! :)')
xml_string = '\n'
- for key, val in self.entry_dict.items():
+ for key, val in val_dict.items():
if key=='dt' and int(val)==2629846:
continue
if (key=='explicit_inventory' or key=='explicit_inventory_compact') and int(val)==0:
diff --git a/neams/cyclus.py b/neams/cyclus.py
deleted file mode 100644
index 9cc524d..0000000
--- a/neams/cyclus.py
+++ /dev/null
@@ -1,877 +0,0 @@
-#!/usr/bin/python
-"""WorkbenchRuntimeEnrivonment"""
-import argparse
-import time
-import getpass
-import json
-import os
-import platform
-import shutil
-import stat
-import paramiko
-import http.client
-import subprocess
-import uuid
-import sys
-import tempfile
-import threading
-
-def unpack_stringlist(stringlist):
- """parses stringlist that was formatted to pass on command-line into original array of strings"""
- if stringlist is None or stringlist == '':
- return None
- # if isinstance(stringlist,basestring):
- # no change
- if not isinstance(stringlist,str) and not isinstance(stringlist,basestring):
- raise ValueError("Method unpack_stringlist requires a string. Found a " + str(type(stringlist)))
- stringlist = stringlist.lstrip("'").rstrip("'")
- stringlist = stringlist.replace("__squote__","'")
- return stringlist.split("__delim__")
-
-
-def pack_stringlist(stringlist):
- """formats a stringlist to pass on command-line"""
- if stringlist is None or stringlist == '':
- return None
- if isinstance(stringlist,list):
- stringlist = '__delim__'.join(stringlist)
- return "'" + stringlist.replace("'","__squote__") + "'"
-
-def create_directory(directory):
- """creates the requested directory path"""
- # path is not an existing directory
- if not os.path.isdir(directory):
- # path exists, this is an error
- if os.path.exists(directory):
- # print error message and quit...
- print ("Error: specified directory path ({0}) exists but is not a " \
- "directory".format(directory))
- sys.exit(1)
- # path doesn't exist, try to create it
- else:
- # catch errors...
- os.makedirs(directory)
-
-def expand(expanding, variables):
- """expand the given string using the given variables"""
- # find right-most start of variable pattern
- start = expanding.rfind("${")
-
- # keep searching while we find variable patterns
- while start >= 0:
- # find left-most end of variable pattern (starting at current position)
- end = expanding.find("}", start + 1)
-
- # stop if no end was found
- if end < 0:
- break
-
- # variable name
- variable = expanding[start+2:end]
-
- # variable value
- if variable in variables:
- value = variables[variable]
- else:
- value = ""
-
- # replace the variable with its value and continue searching
- expanding = expanding[:start] + value + expanding[end+1:]
- start = expanding.rfind("${")
- return expanding
-
-def reader(sin, lines):
- """read each line from sin and add to lines"""
- # read the first line
- line = sin.readline()
-
- # read/write lines until no more can be read
- while line:
- # store last line, read next line
- lines.append(line)
- line = sin.readline()
-
- # close input if not closed
- if not sin.closed:
- sin.close()
-
-def streamer(sin, sout, tout=None):
- """forward input stream to output stream (and optional 'tee-out')"""
- # read the first line
- line = sin.readline()
-
- # read/write lines until no more can be read
- while line:
- # write the last line read, flush to ensure it's written
- sout.write(line)
- sout.flush()
-
- # tout (tee-out) is valid, write to it
- if tout:
- tout.write(line)
- tout.flush()
-
- # read next line
- line = sin.readline()
-
- # close input if not closed
- if not sin.closed:
- sin.close()
-
-def which(exe):
- """locate the given executable via the environment, if necessary"""
- # platform-specific executable checks
- is_windows = platform.system() == "Windows"
- if is_windows:
- exe_check = lambda p: (os.stat(p)[0] & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)) != 0
- else:
- exe_check = lambda p: os.access(p, os.X_OK)
-
- # determine whether the given path is an executable file
- is_exe = lambda p: os.path.isfile(p) and exe_check(p)
-
- # treat the given executable as a path
- path = os.path.realpath(exe)
-
- # this is an executable, early exit
- if is_exe(path):
- return path
-
- # not an executable, search environment via PATH
- for node in os.getenv("PATH", "").split(os.pathsep):
- # test executable path
- path = os.path.realpath(os.path.join(node, exe))
-
- # path is an executable, return it
- if is_exe(path):
- return path
- if is_windows and is_exe(path+".exe"):
- return path+".exe"
-
- # no suitable executable found
- return None
-
-class RunOptions(object):
- """stores per-input run options"""
- def __init__(self):
- self.input = None
- self.input_directory = None
- self.output_basename = None
- self.output_directory = None
- self.working_directory = None
-
-class WorkbenchRuntimeEnvironment(object):
- """base runtime environment class"""
- def __init__(self):
- # Capture RTE directory
- __file__ = '/Users/4ib/Desktop/git/cyclus_gui/neams'
- self.rte_dir = os.path.dirname(os.path.abspath(__file__))
- # initialize attributes
- self.additional = None
- self.cleanup = True
- self.executable = None
- self.inputs = []
- self.json = False
- self.grammar_path = "None"
- self.options = {"shared": [], "unique": []}
- self.output_basename = None
- self.output_directory = None
- self.tee = False
- self.timestamp = False
- self.verbosity = 0
- self.working_directory = None
-
- def set_executable(self, executable):
- self.executable = executable
- def add_arguments(self, parser):
- """adds default (known) arguments"""
- for option_type in self.options:
- for option in self.options[option_type]:
- # skip empty options
- if not bool(option):
- continue
-
- # clone current option for modification
- clone = dict(option)
-
- # capture flag, if specified
- if "flag" in clone:
- flag = clone["flag"]
-
- # remove all fields
- for key in ["deprecated", "flag", "name"]:
- if key in clone:
- del clone[key]
-
- # modify 'type' field to be a callable
- if "type" in clone:
- # cached 'type' field
- cached = clone["type"]
-
- # modify 'type' field for the arg parser
- if cached == "bool":
- del clone["type"]
- elif cached == "float":
- clone["type"] = float
- elif cached == "int":
- clone["type"] = int
- elif cached == "string" or cached == "stringlist":
- clone["type"] = str
-
- # parse args
- parser.add_argument(flag, **clone)
-
- def __add_options(self):
- """private method to add 'shared' options"""
- shared = self.options["shared"]
-
- shared.append({
- "default": self.additional,
- "flag": "additional",
- "help": "Additional arguments to pass to the executable",
- "metavar": "arg",
- "name": "Additional Arguments",
- "nargs": "*",
- "type": "stringlist"
- })
- shared.append({
- "default": self.executable,
- "dest": "executable",
- "flag": "-e",
- "help": "Path to the executable to run",
- "metavar": "executable",
- "name": "Executable",
- "required": True,
- "type": "string"
- })
- shared.append({
- "default": None,
- "dest": "inputs",
- "flag": "-i",
- "help": "Path(s) to input file(s)",
- "metavar": "input_file",
- "name": "Input(s)",
- "nargs": "+",
- "required": True,
- "type": "string"
- })
- shared.append({
- "action": self.print_options(),
- "default": self.json,
- "dest": "json",
- "flag": "-json",
- "help": "Print available options in JSON format and quit",
- "nargs": 0,
- "type": "bool"
- })
- shared.append({
- "action": self.__grammar(),
- "default": self.grammar_path,
- "dest": "grammar_path",
- "flag": "-grammar",
- "help": "Print the path the application's input grammar file path, \
- only if the given file is older than the input grammar",
- "type": "string"
- })
- shared.append({
- "action": "store_true",
- "default": not self.cleanup,
- "dest": "cleanup",
- "flag": "-k",
- "help": "Keep the working directory after execution finishes",
- "name": "Save Working Directory",
- "type": "bool"
- })
- shared.append({
- "default": self.output_basename,
- "dest": "output_basename",
- "flag": "-o",
- "help": "Name of the generated output file. "
- "If it is an absolute or relative path, it overrides output_directory.",
- "metavar": "output_basename",
- "name": "Output Basename",
- "type": "string"
- })
- shared.append({
- "default": self.output_directory,
- "dest": "output_directory",
- "flag": "-O",
- "help": "Directory in which to store the output",
- "metavar": "output_directory",
- "name": "Output Directory",
- "type": "string"
- })
- shared.append({
- "action": "store_true",
- "default": self.timestamp,
- "dest": "timestamp",
- "flag": "-t",
- "help": "Whether to timestamp the working directory, output files, etc.",
- "name": "Timestamp",
- "type": "bool"
- })
- shared.append({
- "default": self.verbosity,
- "dest": "verbosity",
- "flag": "-v",
- "help": "Level of verbosity when logging information (higher values = more messages)",
- "metavar": "verbosity_level",
- "name": "Verbose Level",
- "type": "int"
- })
- shared.append({
- "default": self.working_directory,
- "dest": "working_directory",
- "flag": "-w",
- "help": "Directory in which to run the executable (implies -k)",
- "metavar": "working_directory",
- "name": "Working Directory",
- "type": "string"
- })
-
- # app-specific options
- self.options["unique"] = self.app_options()
-
- def print_options(self):
- """dump a JSON packet of supported options"""
- # reference to self.options
- opts = self.options
- env = self.environment()
-
- class OptionsAction(argparse.Action):
- """action class to dump supported options in a proper format"""
- def __call__(self, parser, namespace, values, option_string=None):
- # dicts containing supported options
- options = {}
-
- # populate based on member's keys
- for key in opts.keys():
- options[key] = []
-
- # supported keys
- supported = ["default", "flag", "help", "name", "type"]
-
- # loop over given options to determine which are supported for output
- for group in opts.keys():
- # loop over each option in this group
- for option in opts[group]:
- # only add option if it has a 'name' entry
- if "name" in option:
- # option dict to retain
- custom = {}
-
- # only include supported keys
- for key in supported:
- # store known keys
- if key in option:
- # store key/value
- custom[key] = option[key]
-
- # add to options
- options[group].append(custom)
-
- print (json.JSONEncoder().encode({
- "options": options,
- "environment": env
- }))
- sys.exit(0)
-
- return OptionsAction
-
- def __grammar(self):
- """dump a SON-formatted input grammar to the provided file"""
- class OptionsAction(argparse.Action):
- """action class to dump grammar to provide file"""
- def __call__(self, parser, namespace, values, option_string=None):
- grammar_path = values
- namespace.update_and_print_grammar(grammar_path)
- sys.exit(0)
-
- return OptionsAction
-
- def update_and_print_grammar(self, grammar_path):
- """Checks the provided grammar file and determines if it is out of date
- and if so, updates it accordingly"""
- print (grammar_path)
- return
-
- def get_grammar_additional_resources(self, grammar_file_path):
- """Returns a list of filepaths that need included which are not normally
- included"""
- return None
-
- def app_name(self):
- """returns the app's self-designated name"""
- return "cyclus"
-
- def app_options(self):
- """list of app-specific options"""
- unique = []
- unique.append({
- "default": '',
- "dest": "remote_server_address",
- "flag": "-r",
- "help": "Remote server address",
- "metavar": "remote_server_address",
- "name": "Remote Server Address",
- "type": "string"
- })
- unique.append({
- "default": '',
- "dest": "remote_server_username",
- "flag": "-u",
- "help": "Remote server username",
- "metavar": "remote_server_username",
- "name": "Remote Username",
- "type": "string"
- })
- unique.append({
- "default": '',
- "dest": "remote_server_password",
- "flag": "-p",
- "help": "Remote server password",
- "metavar": "remote_server_password",
- "name": "Remote Password",
- "type": "string"
- })
- # this is for proxy and other techniques
- unique.append({
- "default": '',
- "dest": "proxy_hostname",
- "flag": "-ph",
- "help": "SSH proxy hostname",
- "metavar": "proxy_hostname",
- "name": "SSH Proxy Hostname",
- "type": "string"
- })
- unique.append({
- "default": '',
- "dest": "proxy_port",
- "flag": "-pp",
- "help": "SSH proxy port",
- "metavar": "proxy_port",
- "name": "SSH Proxy Port",
- "type": "string"
- })
-
- return unique
-
- def echo(self, level, *args):
- """print messages to the console (based on verbosity)"""
- if self.verbosity >= level:
- print ("".join(args))
-
- def environ(self, _input):
- """generate a subprocess' environment"""
- env = os.environ
-
- # init known variables
- env["APP_NAME"] = self.app_name()
- env["INPUT_BASENAME"] = os.path.splitext(os.path.basename(_input))[0]
- env["PID"] = str(os.getpid())
- env["UID"] = getpass.getuser()
-
- return env
-
- def environment(self):
- """generate a dict of supported environment variables"""
- return {}
-
- def execute(self, args):
- """execute the runtime per given input"""
- # parse/process arguments
- parser = argparse.ArgumentParser(description="Runtime Environment")
-
- self.__add_options()
- self.add_arguments(parser)
- self.process_args(parser, args)
-
-
- # execute each input
- for _input in self.inputs:
- # input-specific excution environment
- env = self.environ(_input)
-
- # current input's options
- options = RunOptions()
-
- options.input = os.path.abspath(expand(_input, env))
- options.input_directory = os.path.abspath(os.path.dirname(options.input))
- options.output_basename = expand(self.output_basename
- if self.output_basename
- else "${INPUT_BASENAME}", env)
- options.output_directory = expand(self.output_directory
- if self.output_directory
- else options.input_directory, env)
- options.working_directory = expand(os.path.join(self.working_directory,
- "${APP_NAME}.${UID}.${PID}"), env)
-
- # cache current working directory
- cwd = os.getcwd()
-
- # exeute input
- self.prerun(options)
- self.run(options)
- self.postrun(options)
-
- # change to cached working directory so cleanup can happen
- os.chdir(cwd)
-
- # perform cleanup?
- if self.cleanup and os.path.exists(options.working_directory):
- shutil.rmtree(options.working_directory)
-
- def output_basename_overridden(self):
- """determines whether the output_basename field was overridden"""
- return self.output_basename != None
-
- def output_directory_overridden(self):
- """determines whether the output_directory field was overridden"""
- return self.output_directory != None
-
- def postrun(self, options):
- """actions to perform after the run finishes"""
-
- def prerun(self, options):
- """actions to perform before the run starts"""
- self.echo(1, "#### Pre-run ####")
-
- # ensure output directory exists
- self.echo(1, "# Ensuring output directory exists...")
- create_directory(options.output_directory)
- self.echo(2, "# ", options.output_directory)
-
- # ensure working directory exists, navigate to it
- self.echo(1, "# Ensuring current working directory exists...")
- create_directory(options.working_directory)
- self.echo(2, "# ", options.working_directory)
- os.chdir(options.working_directory)
-
- self.echo(1, "#### Pre-run ####")
- self.echo(1)
-
- def process_args(self, parser, args):
- """parse/process the arguments"""
- self.echo(1, "#### Processing arguments ####")
-
- # parse args
- parser.parse_args(args=args, namespace=self)
-
- # check if it is remote run or local run
- if self.remote_server_address:
- self.is_remote = True
- self.echo(1, '# Remote Execution Enabled.')
-
-
- if self.is_remote:
- # this remote execution assumes that the remote server is a UNIX environment,
- # only tested on Ubuntu.
- try:
- # check if the executable exists:
- self.ssh = paramiko.SSHClient()
- self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
- self.echo(1, '# Attempting to connect to %s' %self.remote_server_address)
- self.echo(1, '# As user "%s"' %self.remote_server_username)
- self.echo(1, '# With password "%s"' %self.remote_server_password)
- if self.proxy_hostname:
- self.echo(1, '# With proxy hostname "%s"' %self.proxy_hostname)
- self.echo(1, '# With proxy port "%s"' %self.proxy_port)
- http_con = http.client.HTTPConnection(self.proxy_hostname, self.proxy_port)
- http_con.set_tunnel(self.remote_server_address, 22, {})
- http_con.connect()
- sock = http_con.sock
- self.ssh.connect(self.remote_server_address, username=self.remote_server_username,
- password=self.remote_server_password, sock=sock,
- allow_agent=False, look_for_keys=False)
- else:
- self.ssh.connect(self.remote_server_address, username=self.remote_server_username,
- password=self.remote_server_password,
- allow_agent=False, look_for_keys=False)
-
- self.echo(1, '# Connected.')
- self.echo(1, '# Testing if the executable exists...')
- # check if file is executable
- output = self.remote_execute('test -x %s && echo "yayyy"' %self.executable)
- if 'yay' in output:
- self.echo(1, '# The file in the defined path is executable.')
- else:
- self.echo(0, 'The file is not Executable')
- sys.exit(1)
- except Exception as e:
- self.echo(0, 'Could not connect. Check arguments.')
- self.echo(0, 'See Error below:')
- self.echo(0, e)
- sys.exit(1)
-
- else:
- self.echo(1, "# Expanding executable path using environment variables")
- self.executable = expand(os.path.expanduser(self.executable), os.environ)
- self.echo(2, "# Executable: ", self.executable)
- # look for valid executable
- self.echo(1, "# Checking whether ", self.executable, " is executable")
- exe = which(self.executable)
- self.echo(2, "# Executable: ", exe)
-
- # verify executable can be executed
- if not exe:
- print ("Error: specified executable ({0}) is not a valid, executable " \
- "file".format(self.executable))
- sys.exit(1)
- # update with full path to executable
- self.executable = exe
-
- # output directory specified
- if self.output_directory:
- self.echo(1, "# Fixing up output_directory...")
- # expand env vars/user-area prefix(es), convert to absolute path
- self.output_directory = expand(self.output_directory, os.environ)
- self.output_directory = os.path.expanduser(self.output_directory)
- self.output_directory = os.path.abspath(self.output_directory)
- self.echo(2, "# ", self.output_directory)
-
- # a relative or absolute path in output_basename overrides the output_directory
- if self.output_basename:
- self.echo(1, "# Fixing up output_basename...")
- # expand env vars/user-area prefix(es), normalize path
- self.output_basename = expand(self.output_basename, os.environ)
- self.output_basename = os.path.expanduser(self.output_basename)
- self.output_basename = os.path.normpath(self.output_basename)
- self.echo(2, "# ", self.output_basename)
-
- # split into parent/child paths
- (parent, child) = os.path.split(self.output_basename)
-
- # there was a parent directory, override output_directory
- if parent:
- self.echo(1, "# output_basename overriding output_directory...")
- self.output_directory = os.path.abspath(parent)
- self.output_basename = child
- self.echo(2, "# ", self.output_directory, " ... ", self.output_basename)
-
- self.echo(1, "# Ensuring root working directory exists...")
-
- # working directory specified, ensure it exists
- if self.working_directory:
- # expand env vars/user-area prefix(es), convert to absolute path
- self.working_directory = expand(self.working_directory, os.environ)
- self.working_directory = os.path.expanduser(self.working_directory)
- self.working_directory = os.path.abspath(self.working_directory)
-
- # create working directory, override cleanup flag
- create_directory(expand(self.working_directory, os.environ))
- self.cleanup = False
- # working directory not specified, default to a temp directory
- else:
- self.working_directory = tempfile.gettempdir()
-
- self.echo(2, "# ", self.working_directory)
- self.echo(1, "#### Processing arguments ####")
- self.echo(1)
-
- def remote_execute(self, cmd):
- i, o, e = self.ssh.exec_command(cmd)
- output = '\n'.join(o.readlines())
- error = '\n'.join(e.readlines())
- if len(error) != 0:
- return error
- return output
-
-
- def run(self, options):
- """run the given executable"""
- self.echo(1, "#### Run ", self.app_name(), " ####")
-
- # list of arguments to pass when launching the executable
- # If the executable is a python script, make sure to use
- # Workbench's python environment as this is likely more
- # recent and contains packages that are not available
- # with default Python installations
- print(self.executable)
- if self.executable.endswith(".py"):
- args = [sys.executable, self.executable]
- else:
- args = [self.executable]
-
- # pass 'additional' to the executable if given
- if self.additional:
- args.extend(self.additional)
- # request list of supported arguments to pass to the executable
- args.extend(self.run_args(options))
- print('args')
- print(args)
- if self.is_remote:
- self.echo(1, "#### Executing '", " ".join(args), "' on remote server %s " %self.remote_server_address)
- rtncode = 0
- try:
- # since we checked the connection from the checking executable part,
- # that part can be skipped
- # upload file
- duplicate_hash = True
- # just in case the hash exists
- n =0
- import os
- while duplicate_hash and n < 3:
- rnd_dir = os.path.join('/home/', self.remote_server_username, str(uuid.uuid4()))
- remote_input_path = os.path.join(rnd_dir, 'input.xml')
- remote_output_path = remote_input_path.replace('.xml', '.sqlite')
- output = self.remote_execute('mkdir %s' %rnd_dir)
- print('error', output)
- n+=1
- if not output:
- # empty output means nothing went wrong,
- duplicate_hash = False
- self.echo(1, '# Uploading input file to %s' %self.remote_server_address)
- self.echo(1, '# To path "%s"' %remote_input_path)
- ftp = self.ssh.open_sftp()
- ftp.put(options.input ,remote_input_path)
-
- self.echo(1, '# Now running %s...' %self.app_name())
- output = self.remote_execute('%s %s -o %s --warn-limit 0' %(self.executable, remote_input_path, remote_output_path))
- # this is super wonky, consider changing
- if output == 0 or ('Error' not in output and 'error' not in output and 'Abort' not in output and 'fatal' not in output and 'Invalid' not in output):
-
- self.echo(1, '############################' )
- self.echo(1, '# %s ran successfully!' %self.app_name())
- self.echo(1, '############################' )
-
- self.echo(1, '# Now downloading output file')
- pre, ext = os.path.splitext(options.input)
- ftp.get(remote_output_path, os.path.join(self.working_directory, pre + '.out'))
- # this is super wonky, consider changing
- time.sleep(5)
- self.echo(1, '# Download complete (%s)' %os.path.join(self.working_directory, pre + '.out'))
-
- else:
- self.echo(1, '# Run Failed! See the following output')
- self.echo(1, output)
-
- except Exception as e:
- self.echo(0, 'Something Went Wrong')
- self.echo(0, 'See Error below:')
- print(e)
- self.echo(0, e)
- sys.exit(1)
-
- else:
- # local run
- self.echo(1, "#### Executing '"," ".join(args),"'")
- # execute
- rtncode = 0
- try:
- proc = subprocess.Popen(args, bufsize=0, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
-
- # tee-output objects
- teeout = None
- teeerr = None
-
- # tee requested
- if self.tee:
- teeout = open(options.output_basename + ".out", "w")
- teeerr = open(options.output_basename + ".err", "w")
-
- # start background readers
- out = threading.Thread(target=streamer, name="out_reader",
- args=(proc.stdout, sys.stdout, teeout))
- err = threading.Thread(target=streamer, name="err_reader",
- args=(proc.stderr, sys.stderr, teeerr))
- out.start()
- err.start()
-
- # wait for process to finish
- proc.wait()
- out.join()
- err.join()
- rtncode = proc.returncode
- except:
- # On some systems with limited or no network access the subprocess module
- # may have failed to install properly. This is a fall back strategy
- import os
- rtncode = os.system(" ".join(args))
-
- self.echo(1, "# Finished running ", self.app_name(), " with exit code ",
- str(proc.returncode))
- self.echo(1)
-
- def run_args(self, options):
- """returns a list of arguments to pass to the given executable"""
- return [options.input]
-
- def strip_unit_of_execution(self, filename, unit_name, strip_son=False):
- """Removes the unit of execution delimiters.
- =unit_name
- ...
- end
- These two lines will be replaced with empty lines.
- If these exist, this function returns a new filename
- to a 'clean' file.
- If no delimiters were encountered, None is returned
- """
- # must determine if this has runtime constructs '^=unit_name'
- # terminated by '^end'. This is not native
- # and must be removed
- has_rt_delimiters = False # assume no '=unit_name...end' exists
-
- if strip_son:
- self.echo(1, "Removing Standard Object Delimiters...")
-
- with open(filename, 'r') as input_file:
- # consume the entire file
- input_file_lines = input_file.readlines()
- index = 0
-
- for line in input_file_lines:
- stripped_line = line.strip().lower()
-
- if strip_son:
- input_file_lines[index] = input_file_lines[index].replace("{", " ")
- input_file_lines[index] = input_file_lines[index].replace("}", " ")
- input_file_lines[index] = input_file_lines[index].replace("]", " ")
- input_file_lines[index] = input_file_lines[index].replace("[", " ")
- input_file_lines[index] = input_file_lines[index].replace("= YES", " ")
- has_rt_delimiters = True
-
- if stripped_line == "=" + unit_name:
- has_rt_delimiters = True
- # Replace runtime delimiter with empty space
- input_file_lines.pop(index)
- input_file_lines.insert(index, "")
- elif stripped_line == "end":
- has_rt_delimiters = True
- # Replace runtime delimiter with empty space
- input_file_lines.pop(index)
- input_file_lines.insert(index, "")
-
- index += 1
-
- # if the delimiters are present write the clean file
- if has_rt_delimiters:
- clean_input_file_name = filename + ".cleaned"
- self.echo(1, " -- Runtime construct discovered, producing ", clean_input_file_name)
-
- with open(clean_input_file_name, 'w') as cleaned_input_file:
- for line in input_file_lines:
- cleaned_input_file.write(line)
-
- # update the input file to be the cleaned version
- return clean_input_file_name
-
- # if no stripping occurred, None is the response
- return None
-
- def working_directory_overridden(self):
- """determines whether the working_directory field was overridden"""
- return self.working_directory != tempfile.gettempdir()
-
- def exit_gracefully(self, signum, frame):
- """Cleanup steps to perform after receiving the SIGTERM signal"""
- return
-
-if __name__ == "__main__":
- # from util.check_version import CheckVersion
- # CheckVersion.check()
- import signal
- rte = WorkbenchRuntimeEnvironment()
- signal.signal(signal.SIGTERM, rte.exit_gracefully)
- signal.signal(signal.SIGINT, rte.exit_gracefully)
- # execute runtime, ignoring first argument (the python script itself)
- rte.execute(sys.argv[1:])
diff --git a/neams/cyclus.sch b/neams/cyclus.sch
deleted file mode 100644
index 8d442c3..0000000
--- a/neams/cyclus.sch
+++ /dev/null
@@ -1,395 +0,0 @@
-simulation {
- Description="Agent-based fuel cycle simulator"
- InputTmpl="init_template"
- control {
- MinOccurs=1
- Description="Defines simulation time and decay methods"
- MaxOccurs=1
- duration={
- MinOccurs=1
- MaxOccurs=1
- Description="the number of timesteps in simulation"
- ValType=Int
- }
- startyear={
- MinOccurs=1
- MaxOccurs=1
- Description="the year to start the simulation"
- ValType=Int
- }
- startmonth={
- MinOccurs=1
- MaxOccurs=1
- Description="the month to start the simulation"
- ValType=Int
- ValEnums=[ 1 2 3 4 5 6 7 8 9 10 11 12 ]
- }
- decay={
- MinOccurs=0
- MaxOccurs=1
- Description="How to model decay in Cyclus"
- ValType=String
- ValEnums=["lazy" "manual" "never"]
- }
- dt={
- MinOccurs=0
- MaxOccurs=1
- ValType=Real
- Description="duration of a single timestep in seconds"
- }
- explicit_inventory={
- MinOccurs=0
- MaxOccurs=1
- ValType=Int
- ValEnums=[0 1]
- Description="boolean specifying whether or nor to track inventory in each agent"
- }
-
- }
-
- archetypes {
- MinOccurs=1
- Description="Defines the archetypes used in this simulation"
- MaxOccurs=1
- spec={
- MinOccurs=1
- lib={MinOccurs=1
- MaxOccurs=1
- ValType=String
- }
- name={MinOccurs=1
- MaxOccurs=1
- ValType=String
- }
- }
- }
-
- facility {
- name= {MinOccurs=1
- MaxOccurs=1
- ValType=String}
- Description="Facility definition block"
- MinOccurs=1
- config = {MinOccurs=1
- MaxOccurs=1
- ChildExactlyOne = [KFacility Predator Prey Sink Source Enrichment FuelFab Mixer Reactor Separations Storage]
- KFacility=
- {InputTmpl="KFacility"
- current_capacity={MaxOccurs=1 ValType=Real}
- in_capacity={MaxOccurs=1 MinOccurs=1 ValType=Real}
- in_commod={MaxOccurs=1 MinOccurs=1 ValType=String}
- k_factor_in={MaxOccurs=1 MinOccurs=1 ValType=Real}
- k_factor_out={MaxOccurs=1 MinOccurs=1 ValType=Real}
- max_inv_size={MaxOccurs=1 ValType=Real}
- out_capacity={MaxOccurs=1 MinOccurs=1 ValType=Real}
- out_commod={MaxOccurs=1 MinOccurs=1 ValType=String}
- recipe_name={MaxOccurs=1 MinOccurs=1 ValType=String}}
- Predator=
- {InputTmpl="Predator"
- age={MaxOccurs=1 ValType=Int}
- birth_and_death={MaxOccurs=1 ValType=Int}
- commod={MaxOccurs=1 MinOccurs=1 ValType=String}
- consumed={MaxOccurs=1 ValType=Real}
- dead={MaxOccurs=1 ValType=Int}
- full={MaxOccurs=1 ValType=Real}
- hunt_cap={MaxOccurs=1 ValType=Real}
- hunt_factor={MaxOccurs=1 ValType=Int}
- hunt_freq={MaxOccurs=1 ValType=Int}
- lifespan={MaxOccurs=1 ValType=Int}
- nchildren={MaxOccurs=1 ValType=Real}
- prey={MaxOccurs=1 MinOccurs=1 ValType=String}
- success={MaxOccurs=1 ValType=Real}}
- Prey=
- {InputTmpl="Prey"
- age={MaxOccurs=1 ValType=Int}
- birth_and_death={MaxOccurs=1 ValType=Int}
- birth_freq={MaxOccurs=1 ValType=Int}
- commod={MaxOccurs=1 MinOccurs=1 ValType=String}
- dead={MaxOccurs=1 ValType=Int}
- nchildren={MaxOccurs=1 ValType=Int}}
- Sink=
- {InputTmpl="Sink"
- capacity={MaxOccurs=1 ValType=Real}
- in_commod_prefs={MaxOccurs=1
- val={MinOccurs=1 ValType=Real}}
- in_commods={MaxOccurs=1
- MinOccurs=1
- val={MinOccurs=1 ValType=String}}
- latitude={MaxOccurs=1 ValType=Real}
- longitude={MaxOccurs=1 ValType=Real}
- max_inv_size={MaxOccurs=1 ValType=Real}
- recipe_name={MaxOccurs=1 ValType=String}}
- Source=
- {InputTmpl="Source"
- inventory_size={MaxOccurs=1 ValType=Real}
- latitude={MaxOccurs=1 ValType=Real}
- longitude={MaxOccurs=1 ValType=Real}
- outcommod={MaxOccurs=1 MinOccurs=1 ValType=String}
- outrecipe={MaxOccurs=1 ValType=String}
- throughput={MaxOccurs=1 ValType=Real}}
- Enrichment=
- {InputTmpl="Enrichment"
- feed_commod={MaxOccurs=1 MinOccurs=1 ValType=String}
- feed_recipe={MaxOccurs=1 MinOccurs=1 ValType=String}
- initial_feed={MaxOccurs=1 ValType=Real}
- latitude={MaxOccurs=1 ValType=Real}
- longitude={MaxOccurs=1 ValType=Real}
- max_enrich={MaxOccurs=1 ValType=Real}
- max_feed_inventory={MaxOccurs=1 ValType=Real}
- order_prefs={MaxOccurs=1 ValType=Int}
- product_commod={MaxOccurs=1 MinOccurs=1 ValType=String}
- swu_capacity={MaxOccurs=1 ValType=Real}
- tails_assay={MaxOccurs=1 ValType=Real}
- tails_commod={MaxOccurs=1 MinOccurs=1 ValType=String}}
- FuelFab=
- {InputTmpl="FuelFab"
- fill_commod_prefs={MaxOccurs=1
- val={MinOccurs=1 ValType=Real}}
- fill_commods={MaxOccurs=1
- MinOccurs=1
- val={MinOccurs=1 ValType=String}}
- fill_recipe={MaxOccurs=1 MinOccurs=1 ValType=String}
- fill_size={MaxOccurs=1 MinOccurs=1 ValType=Real}
- fiss_commod_prefs={MaxOccurs=1
- val={MinOccurs=1 ValType=Real}}
- fiss_commods={MaxOccurs=1
- MinOccurs=1
- val={MinOccurs=1 ValType=String}}
- fiss_recipe={MaxOccurs=1 ValType=String}
- fiss_size={MaxOccurs=1 MinOccurs=1 ValType=Real}
- latitude={MaxOccurs=1 ValType=Real}
- longitude={MaxOccurs=1 ValType=Real}
- outcommod={MaxOccurs=1 MinOccurs=1 ValType=String}
- spectrum={MaxOccurs=1 MinOccurs=1 ValType=String}
- throughput={MaxOccurs=1 ValType=Real}
- topup_commod={MaxOccurs=1 ValType=String}
- topup_pref={MaxOccurs=1 ValType=Real}
- topup_recipe={MaxOccurs=1 ValType=String}
- topup_size={MaxOccurs=1 ValType=Real}}
- Mixer=
- {InputTmpl="Mixer"
- in_streams={MaxOccurs=1
- MinOccurs=1
- stream={MinOccurs=1
- commodities={MaxOccurs=1
- MinOccurs=1
- item={MinOccurs=1
- commodity={MaxOccurs=1
- MinOccurs=1
- ValType=String}
- pref={MaxOccurs=1
- MinOccurs=1
- ValType=Real}}}
- info={MaxOccurs=1
- MinOccurs=1
- buf_size={MaxOccurs=1
- MinOccurs=1
- ValType=Real}
- mixing_ratio={MaxOccurs=1
- MinOccurs=1
- ValType=Real}}}}
- latitude={MaxOccurs=1 ValType=Real}
- longitude={MaxOccurs=1 ValType=Real}
- out_buf_size={MaxOccurs=1 ValType=Real}
- out_commod={MaxOccurs=1 MinOccurs=1 ValType=String}
- throughput={MaxOccurs=1 ValType=Real}}
- Reactor=
- {InputTmpl="Reactor"
- assem_size={MaxOccurs=1 MinOccurs=1 ValType=Real}
- cycle_step={MaxOccurs=1 ValType=Int}
- cycle_time={MaxOccurs=1 ValType=Int}
- decom_transmute_all={MaxOccurs=1 ValType=Int}
- fuel_incommods={MaxOccurs=1
- MinOccurs=1
- val={MinOccurs=1 ValType=String}}
- fuel_inrecipes={MaxOccurs=1
- MinOccurs=1
- val={MinOccurs=1 ValType=String}}
- fuel_outcommods={MaxOccurs=1
- MinOccurs=1
- val={MinOccurs=1 ValType=String}}
- fuel_outrecipes={MaxOccurs=1
- MinOccurs=1
- val={MinOccurs=1 ValType=String}}
- fuel_prefs={MaxOccurs=1 val={MinOccurs=1 ValType=Real}}
- latitude={MaxOccurs=1 ValType=Real}
- longitude={MaxOccurs=1 ValType=Real}
- n_assem_batch={MaxOccurs=1 MinOccurs=1 ValType=Int}
- n_assem_core={MaxOccurs=1 ValType=Int}
- n_assem_fresh={MaxOccurs=1 ValType=Int}
- n_assem_spent={MaxOccurs=1 ValType=Int}
- power_cap={MaxOccurs=1 ValType=Real}
- power_name={MaxOccurs=1 ValType=String}
- pref_change_commods={MaxOccurs=1
- val={MinOccurs=1 ValType=String}}
- pref_change_times={MaxOccurs=1
- val={MinOccurs=1 ValType=Int}}
- pref_change_values={MaxOccurs=1
- val={MinOccurs=1 ValType=Real}}
- recipe_change_commods={MaxOccurs=1
- val={MinOccurs=1 ValType=String}}
- recipe_change_in={MaxOccurs=1
- val={MinOccurs=1 ValType=String}}
- recipe_change_out={MaxOccurs=1
- val={MinOccurs=1 ValType=String}}
- recipe_change_times={MaxOccurs=1
- val={MinOccurs=1 ValType=Int}}
- refuel_time={MaxOccurs=1 ValType=Int}
- side_product_quantity={MaxOccurs=1
- val={MinOccurs=1 ValType=Real}}
- side_products={MaxOccurs=1
- val={MinOccurs=1 ValType=String}}}
- Separations=
- {InputTmpl="Separations"
- feed_commod_prefs={MaxOccurs=1
- val={MinOccurs=1 ValType=Real}}
- feed_commods={MaxOccurs=1
- MinOccurs=1
- val={MinOccurs=1 ValType=String}}
- feed_recipe={MaxOccurs=1 ValType=String}
- feedbuf_size={MaxOccurs=1 MinOccurs=1 ValType=Real}
- latitude={MaxOccurs=1 ValType=Real}
- leftover_commod={MaxOccurs=1 ValType=String}
- leftoverbuf_size={MaxOccurs=1 ValType=Real}
- longitude={MaxOccurs=1 ValType=Real}
- streams={MaxOccurs=1
- MinOccurs=1
- item={MinOccurs=1
- commod={MaxOccurs=1
- MinOccurs=1
- ValType=String}
- info={MaxOccurs=1
- MinOccurs=1
- buf_size={MaxOccurs=1
- MinOccurs=1
- ValType=Real}
- efficiencies={MaxOccurs=1
- MinOccurs=1
- item={MinOccurs=1
- comp={MaxOccurs=1
- MinOccurs=1
- ValType=String}
- eff={MaxOccurs=1
- MinOccurs=1
- ValType=Real}}}}}}
- throughput={MaxOccurs=1 ValType=Real}}
- Storage=
- {InputTmpl="Storage"
- discrete_handling={MaxOccurs=1 ValType=Int}
- in_commod_prefs={MaxOccurs=1
- val={MinOccurs=1 ValType=Real}}
- in_commods={MaxOccurs=1
- MinOccurs=1
- val={MinOccurs=1 ValType=String}}
- in_recipe={MaxOccurs=1 ValType=String}
- latitude={MaxOccurs=1 ValType=Real}
- longitude={MaxOccurs=1 ValType=Real}
- max_inv_size={MaxOccurs=1 ValType=Real}
- out_commods={MaxOccurs=1
- MinOccurs=1
- val={MinOccurs=1 ValType=String}}
- residence_time={MaxOccurs=1 ValType=Int}
- throughput={MaxOccurs=1 ValType=Real}}
-}
- }
-
- region{
- Description="Region definition block"
- name= {MinOccurs=1
- MaxOccurs=1
- ValType=String}
- MinOccurs=1
- config= {MinOccurs=1
- MaxOccurs=1
- ChildExactlyOne = [NullRegion GrowthRegion]
- NullRegion=
- {InputTmpl="NullRegion"}
- GrowthRegion=
- {InputTmpl="GrowthRegion"
- growth={MaxOccurs=1
- MinOccurs=1
- item={MinOccurs=1
- commod={MaxOccurs=1
- MinOccurs=1
- ValType=String}
- piecewise_function={MaxOccurs=1
- MinOccurs=1
- piece={MinOccurs=1
- function={MaxOccurs=1
- MinOccurs=1
- params={MaxOccurs=1
- MinOccurs=1
- ValType=String}
- type={MaxOccurs=1
- MinOccurs=1
- ValType=String}}
- start={MaxOccurs=1
- MinOccurs=1
- ValType=Int}}}}}
- latitude={MaxOccurs=1 ValType=Real}
- longitude={MaxOccurs=1 ValType=Real}}
-
- }
- institution={MinOccurs=1
- name= {MinOccurs=1
- MaxOccurs=1
- ValType=String}
- config={MinOccurs=1
- MaxOccurs=1
- ChildExactlyOne = [NullInst DeployInst ManagerInst]
- NullInst=
- {InputTmpl="NullInst"}
- DeployInst=
- {InputTmpl="DeployInst"
- build_times={MaxOccurs=1
- MinOccurs=1
- val={MinOccurs=1 ValType=Int}}
- latitude={MaxOccurs=1 ValType=Real}
- lifetimes={MaxOccurs=1 val={MinOccurs=1 ValType=Int}}
- longitude={MaxOccurs=1 ValType=Real}
- n_build={MaxOccurs=1
- MinOccurs=1
- val={MinOccurs=1 ValType=Int}}
- prototypes={MaxOccurs=1
- MinOccurs=1
- val={MinOccurs=1 ValType=String}}}
- ManagerInst=
- {InputTmpl="ManagerInst"
- latitude={MaxOccurs=1 ValType=Real}
- longitude={MaxOccurs=1 ValType=Real}
- prototypes={MaxOccurs=1
- MinOccurs=1
- val={MinOccurs=1 ValType=String}}}
-}
- initialfacilitiylist={MaxOccurs=1
- entry={MinOccurs=1
- number={MaxOccurs=1
- ValType=Int}
- prototype={MaxOccurs=1
- ValType=String}
- }
- }
- }
- }
-
- recipe{
- Description="Recipe definition block"
- name={
- MinOccurs=1
- MaxOccurs=1
- ValType=String
- }
- basis={
- MinOccurs=1
- MaxOccurs=1
- ValType=String
- ValEnums=["mass" "atom"]
- }
- nuclide={
- MinOccurs=1
- id={MinOccurs=1 MaxOccurs=1}
- comp={MinOccurs=1 MaxOccurs=1 ValType=Real}
- }
- }
-
-}
diff --git a/neams/cyclus.wbg b/neams/cyclus.wbg
deleted file mode 100644
index 64f806b..0000000
--- a/neams/cyclus.wbg
+++ /dev/null
@@ -1,13 +0,0 @@
-name= Cyclus
-enabled = true
-
-parser = waspson
-schema = "/Users/4ib/Desktop/git/cyclus_gui/neams/cyclus.sch"
-validator = wasp
-
-templates = "/Users/4ib/Desktop/git/cyclus_gui/neams/templates/"
-
-highlighter = "/Users/4ib/Desktop/git/cyclus_gui/neams/cyclus.wbh"
-
-extensions = [cyclus]
-maxDepth = 10
diff --git a/neams/cyclus.wbh b/neams/cyclus.wbh
deleted file mode 100644
index 78ab8c7..0000000
--- a/neams/cyclus.wbh
+++ /dev/null
@@ -1,95 +0,0 @@
-rule("simulation") {
-pattern = "simulation"
-bold = true
-foreground {
- red = 0
- green = 0
- blue = 255
- }
-}
-
- rule(" control ") {
-pattern = " control "
-bold = true
-foreground {
- red = 0
- green = 0
- blue = 255
- }
-}
-
- rule(" archetypes ") {
-pattern = " archetypes "
-bold = true
-foreground {
- red = 0
- green = 0
- blue = 255
- }
-}
-
- rule(" facility ") {
-pattern = " facility "
-bold = true
-foreground {
- red = 0
- green = 0
- blue = 255
- }
-}
-
- rule(" region ") {
-pattern = " region "
-bold = true
-foreground {
- red = 0
- green = 0
- blue = 255
- }
-}
-
- rule(" recipe ") {
-pattern = " recipe "
-bold = true
-foreground {
- red = 0
- green = 0
- blue = 255
- }
-}
-
- rule("Quoted string") {
-pattern = """'[^']*'|"[^"]*""""
-bold = true
-foreground {
- red = 255
- green = 130
- blue = 0
- }
-background {
- red = 255
- green = 130
- blue = 0
- alpha = 25
- }
-}
-
-rule("equal"){
-pattern="="
-background{
- red=192
- green=192
- blue=192
- }
-}
-
-rule("Comment") {
- pattern = "%.*"
- italic = true
- foreground {
- red = 0
- green = 128
- blue = 0
- }
-}
-
diff --git a/neams/example.json b/neams/example.json
deleted file mode 100644
index 33766cc..0000000
--- a/neams/example.json
+++ /dev/null
@@ -1,171 +0,0 @@
-{
- "simulation": {
- "archetypes": {
- "spec": [
- {"lib": "agents", "name": "NullInst"},
- {"lib": "agents", "name": "NullRegion"},
- {"lib": "cycamore", "name": "Source"},
- {"lib": "cycamore", "name": "Sink"},
- {"lib": "cycamore", "name": "Enrichment"},
- {"lib": "cycamore", "name": "Reactor"},
- {"lib": "cycamore", "name": "FuelFab"},
- {"lib": "cycamore", "name": "Separations"}
- ]
- },
- "control": {"duration": "600", "startmonth": "1", "startyear": "2000"},
- "facility": [
- {
- "config": {
- "Enrichment": {
- "feed_commod": "natl_u",
- "feed_recipe": "natl_u",
- "initial_feed": "1e100",
- "product_commod": "uox",
- "swu_capacity": "1e100",
- "tails_assay": "0.003",
- "tails_commod": "waste"
- }
- },
- "name": "enrichment"
- },
- {
- "config": {
- "Separations": {
- "feed_commod_prefs": {"val": "2.0"},
- "feed_commods": {"val": "spent_uox"},
- "feedbuf_size": "30001",
- "leftover_commod": "waste",
- "streams": {
- "item": {
- "commod": "sep_stream",
- "info": {"buf_size": "1e100", "efficiencies": {"item": {"comp": "Pu", "eff": ".99"}}}
- }
- },
- "throughput": "30001"
- }
- },
- "name": "separations"
- },
- {
- "config": {
- "FuelFab": {
- "fill_commods": {"val": "depleted_u"},
- "fill_recipe": "depleted_u",
- "fill_size": "30001",
- "fiss_commods": {"val": "sep_stream"},
- "fiss_size": "15000",
- "outcommod": "mox",
- "spectrum": "thermal",
- "throughput": "30001"
- }
- },
- "name": "fuelfab"
- },
- {
- "config": {
- "Reactor": {
- "assem_size": "30000",
- "cycle_time": "17",
- "fuel_incommods": {"val": ["uox", "mox"]},
- "fuel_inrecipes": {"val": ["fresh_uox", "fresh_mox"]},
- "fuel_outcommods": {"val": ["spent_uox", "waste"]},
- "fuel_outrecipes": {"val": ["spent_uox", "spent_mox"]},
- "fuel_prefs": {"val": ["1.0", "2.0"]},
- "n_assem_batch": "1",
- "n_assem_core": "3",
- "refuel_time": "2"
- }
- },
- "name": "reactor"
- },
- {
- "config": {"Sink": {"capacity": "1e100", "in_commods": {"val": "waste"}}},
- "name": "repo"
- },
- {
- "config": {"Source": {"outcommod": "depleted_u", "outrecipe": "depleted_u"}},
- "name": "depleted_src"
- }
- ],
- "recipe": [
- {
- "basis": "mass",
- "name": "natl_u",
- "nuclide": [{"comp": "0.711", "id": "U235"}, {"comp": "99.289", "id": "U238"}]
- },
- {
- "basis": "mass",
- "name": "fresh_uox",
- "nuclide": [{"comp": "0.04", "id": "U235"}, {"comp": "0.96", "id": "U238"}]
- },
- {
- "basis": "mass",
- "name": "depleted_u",
- "nuclide": [{"comp": "0.003", "id": "U235"}, {"comp": "0.997", "id": "U238"}]
- },
- {
- "basis": "mass",
- "name": "fresh_mox",
- "nuclide": [
- {"comp": "0.0027381", "id": "U235"},
- {"comp": "0.9099619", "id": "U238"},
- {"comp": "0.001746", "id": "Pu238"},
- {"comp": "0.045396", "id": "Pu239"},
- {"comp": "0.020952", "id": "Pu240"},
- {"comp": "0.013095", "id": "Pu241"},
- {"comp": "0.005238", "id": "Pu242"}
- ]
- },
- {
- "basis": "mass",
- "name": "spent_mox",
- "nuclide": [
- {"comp": "0.0017381", "id": "U235"},
- {"comp": "0.90", "id": "U238"},
- {"comp": "0.001746", "id": "Pu238"},
- {"comp": "0.0134", "id": "Pu239"},
- {"comp": "0.020952", "id": "Pu240"},
- {"comp": "0.013095", "id": "Pu241"},
- {"comp": "0.005238", "id": "Pu242"}
- ]
- },
- {
- "basis": "mass",
- "name": "spent_uox",
- "nuclide": [
- {"comp": "156.729", "id": "U235"},
- {"comp": "102.103", "id": "U236"},
- {"comp": "18280.324", "id": "U238"},
- {"comp": "13.656", "id": "Np237"},
- {"comp": "5.043", "id": "Pu238"},
- {"comp": "106.343", "id": "Pu239"},
- {"comp": "41.357", "id": "Pu240"},
- {"comp": "36.477", "id": "Pu241"},
- {"comp": "15.387", "id": "Pu242"},
- {"comp": "1.234", "id": "Am241"},
- {"comp": "3.607", "id": "Am243"},
- {"comp": "0.431", "id": "Cm244"},
- {"comp": "1.263", "id": "Cm245"}
- ]
- }
- ],
- "region": {
- "config": {"NullRegion": null},
- "institution": {
- "config": {"NullInst": null},
- "initialfacilitylist": {
- "entry": [
- {"number": "1", "prototype": "repo"},
- {"number": "1", "prototype": "reactor"},
- {"number": "1", "prototype": "depleted_src"},
- {"number": "1", "prototype": "fuelfab"},
- {"number": "1", "prototype": "separations"},
- {"number": "1", "prototype": "enrichment"}
- ]
- },
- "name": "SingleInstitution"
- },
- "name": "SingleRegion"
- }
- }
-}
\ No newline at end of file
diff --git a/neams/json.wbh b/neams/json.wbh
deleted file mode 100644
index 5029425..0000000
--- a/neams/json.wbh
+++ /dev/null
@@ -1,55 +0,0 @@
-rule("Quoted string") {
-
- foreground {
- red = 128
- green = 0
- blue = 0
- }
- pattern = """'[^']*'|"[^"]*""""
-}
-rule("Keyword") {
- bold = true
- foreground {
- red = 0
- green = 0
- blue = 128
- }
- pattern = "\s*((read|end)\s+\S+|end\s*$)"
-}
-rule("Number") {
- bold = true
- foreground {
- red = 128
- green = 0
- blue = 0
- }
- pattern = "\b[-+]?(\d+\.?\d*|\.\d+)([eE][-+]?\d+)?\b"
-}
-rule("Boolean") {
- bold = true
- foreground {
- red = 128
- green = 0
- blue = 128
- }
- pattern = "\b(true|false)\b"
-}
-rule("Null") {
- bold = true
- foreground {
- red = 255
- green = 255
- blue = 255
- }
- pattern = "\b(true|false)\b"
-}
-
-rule("Curly") {
- bold = true
- foreground {
- red = 255
- green = 0
- blue = 0
- }
- pattern = "{|}"
-}
diff --git a/neams/schema.xml b/neams/schema.xml
deleted file mode 100644
index 9baef17..0000000
--- a/neams/schema.xml
+++ /dev/null
@@ -1,180 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- @Facility_REFS@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- @Region_REFS@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- @Inst_REFS@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/neams/schema_to_sch.ipynb b/neams/schema_to_sch.ipynb
deleted file mode 100644
index bbdb567..0000000
--- a/neams/schema_to_sch.ipynb
+++ /dev/null
@@ -1,803 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": 1,
- "metadata": {},
- "outputs": [],
- "source": [
- "\n",
- "import copy\n",
- "import xmltodict\n",
- "import numpy as np\n",
- "import json\n",
- "import pprint"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 2,
- "metadata": {},
- "outputs": [],
- "source": [
- "# % is the comment for SON"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 3,
- "metadata": {},
- "outputs": [],
- "source": [
- "f = json.loads(open('m.json').read())"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 4,
- "metadata": {},
- "outputs": [],
- "source": [
- "specs = f['specs']\n",
- "ex = specs[-4]\n",
- "\n",
- "\n",
- "#def read_schema()\n",
- "xmltodict.parse(f['schema'][ex])['interleave'].keys()\n",
- "d = xmltodict.parse(f['schema'][ex])"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 8,
- "metadata": {
- "scrolled": false
- },
- "outputs": [
- {
- "data": {
- "text/plain": [
- "{'alias': 'in_capacity',\n",
- " 'doc': 'number of commodity units that can be taken at each timestep (infinite capacity can be represented by a very large number)',\n",
- " 'index': 3,\n",
- " 'shape': [-1],\n",
- " 'tooltip': 'input commodity capacity',\n",
- " 'type': 'double',\n",
- " 'uilabel': 'Incoming Throughput'}"
- ]
- },
- "execution_count": 8,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "f['annotations'][':agents:KFacility']['vars']['in_capacity']"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 6,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/plain": [
- "[':agents:KFacility',\n",
- " ':agents:NullInst',\n",
- " ':agents:NullRegion',\n",
- " ':agents:Predator',\n",
- " ':agents:Prey',\n",
- " ':agents:Sink',\n",
- " ':agents:Source',\n",
- " ':cycamore:DeployInst',\n",
- " ':cycamore:Enrichment',\n",
- " ':cycamore:FuelFab',\n",
- " ':cycamore:GrowthRegion',\n",
- " ':cycamore:ManagerInst',\n",
- " ':cycamore:Mixer',\n",
- " ':cycamore:Reactor',\n",
- " ':cycamore:Separations',\n",
- " ':cycamore:Sink',\n",
- " ':cycamore:Source',\n",
- " ':cycamore:Storage']"
- ]
- },
- "execution_count": 6,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "archetypes = f['specs']\n",
- "archetypes"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 7,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "{'feed_commod_prefs': {'MaxOccurs': 1, 'val': {'ValType': 'Real'}},\n",
- " 'feed_commods': {'MaxOccurs': 1,\n",
- " 'MinOccurs': 1,\n",
- " 'val': {'MinOccurs': 1, 'ValType': 'String'}},\n",
- " 'feed_recipe': {'MaxOccurs': 1, 'ValType': 'String'},\n",
- " 'feedbuf_size': {'MaxOccurs': 1, 'MinOccurs': 1, 'ValType': 'Real'},\n",
- " 'latitude': {'MaxOccurs': 1, 'ValType': 'Real'},\n",
- " 'leftover_commod': {'MaxOccurs': 1, 'ValType': 'String'},\n",
- " 'leftoverbuf_size': {'MaxOccurs': 1, 'ValType': 'Real'},\n",
- " 'longitude': {'MaxOccurs': 1, 'ValType': 'Real'},\n",
- " 'streams': {'MaxOccurs': 1,\n",
- " 'MinOccurs': 1,\n",
- " 'item': {'MinOccurs': 1,\n",
- " 'commod': {'MaxOccurs': 1,\n",
- " 'MinOccurs': 1,\n",
- " 'ValType': 'String'},\n",
- " 'info': {'MaxOccurs': 1,\n",
- " 'MinOccurs': 1,\n",
- " 'buf_size': {'MaxOccurs': 1,\n",
- " 'MinOccurs': 1,\n",
- " 'ValType': 'Real'},\n",
- " 'efficiencies': {'MaxOccurs': 1,\n",
- " 'MinOccurs': 1,\n",
- " 'item': {'MinOccurs': 1,\n",
- " 'comp': {'MaxOccurs': 1,\n",
- " 'MinOccurs': 1,\n",
- " 'ValType': 'String'},\n",
- " 'eff': {'MaxOccurs': 1,\n",
- " 'MinOccurs': 1,\n",
- " 'ValType': 'Real'}}}}}},\n",
- " 'throughput': {'MaxOccurs': 1, 'ValType': 'Real'}}\n"
- ]
- }
- ],
- "source": [
- "conversion_dict = {'string': 'String',\n",
- " 'nonNegativeInteger': 'Int',\n",
- " 'boolean': 'Int',\n",
- " 'double': 'Real',\n",
- " 'positiveInteger': 'Int',\n",
- " 'float': 'Real',\n",
- " 'double': 'Real',\n",
- " 'duration': 'Int',\n",
- " 'integer': 'Int',\n",
- " 'nonPositiveInteger': 'Int',\n",
- " 'negativeInteger': 'Int',\n",
- " 'long': 'Real',\n",
- " 'int': 'Int',\n",
- " 'token': 'Real'\n",
- " }\n",
- "\n",
- "def read_element(eld, from_one_or_more=False, optional=False):\n",
- " if 'interleave' in eld.keys():\n",
- " s = read_interleave(eld['interleave'], eld['@name'], from_one_or_more, optional)\n",
- " return s\n",
- "\n",
- " # now there's optional and non-optional\n",
- " keys = eld.keys()\n",
- " if not from_one_or_more:\n",
- " options = {'MaxOccurs': 1}\n",
- " else:\n",
- " options = {}\n",
- "\n",
- " if optional:\n",
- " options = options\n",
- " else:\n",
- " options['MinOccurs'] = 1\n",
- " \n",
- " s = {eld['@name']: options}\n",
- " www = np.random.uniform(0, 10)\n",
- " if 'oneOrMore' in keys:\n",
- " s[eld['@name']].update(read_element(eld['oneOrMore']['element'],\n",
- " from_one_or_more=True, optional=optional)\n",
- " )\n",
- "\n",
- " return s\n",
- "\n",
- " if 'data' in keys:\n",
- " options['ValType'] = conversion_dict[eld['data']['@type']]\n",
- " s[eld['@name']] = options\n",
- " return s\n",
- "\n",
- " \n",
- "def read_interleave(intd, name, from_one_or_more, optional):\n",
- " if not optional:\n",
- " options = {'MinOccurs':1}\n",
- " else:\n",
- " options = {}\n",
- " if not from_one_or_more:\n",
- " options['MaxOccurs'] = 1\n",
- " d = {name: options}\n",
- " for i in intd['element']:\n",
- " d[name].update(read_element(i))\n",
- " return d\n",
- "\n",
- "\n",
- "\n",
- "sep_dict = {}\n",
- "d = xmltodict.parse(f['schema'][':cycamore:Separations'])\n",
- "for i in json.loads(json.dumps(d['interleave']['element'])):\n",
- " sep_dict.update(read_element(i))\n",
- "for j in json.loads(json.dumps(d['interleave']['optional'])):\n",
- " sep_dict.update(read_element(j['element'], optional=True))\n",
- "pprint.pprint(sep_dict)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 8,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/plain": [
- "'\\n'"
- ]
- },
- "execution_count": 8,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "f['schema'][':agents:NullRegion']"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 9,
- "metadata": {
- "scrolled": true
- },
- "outputs": [
- {
- "ename": "NameError",
- "evalue": "name 'schema_dict_string_to_template' is not defined",
- "output_type": "error",
- "traceback": [
- "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
- "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
- "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 26\u001b[0m \u001b[0mschema_dict\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mkey\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mupdate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mread_element\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'element'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0moptional\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 27\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 28\u001b[0;31m \u001b[0mtemplate_dict\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mkey\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mschema_dict_string_to_template\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mschema_dict\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mkey\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkey\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
- "\u001b[0;31mNameError\u001b[0m: name 'schema_dict_string_to_template' is not defined"
- ]
- }
- ],
- "source": [
- "def check_if_list(d):\n",
- " # this is because some schemas,\n",
- " # if there's only one entry would have\n",
- " # a dictionary, while if there's multiple\n",
- " # would have a list\n",
- " if 'ict' in str(type(d)):\n",
- " return [d]\n",
- " else:\n",
- " return d\n",
- " \n",
- "\n",
- "schema_dict = {}\n",
- "template_dict = {}\n",
- "for key in f['specs']:\n",
- " if 'NullRegion' in key or 'NullInst' in key:\n",
- " schema_dict[key] = {}\n",
- " continue\n",
- " libname = key.split(':')[1]\n",
- " schema_dict[key] = {}\n",
- " \n",
- " k = check_if_list(xmltodict.parse(f['schema'][key])['interleave']['element'])\n",
- " for i in k:\n",
- " schema_dict[key].update(read_element(i))\n",
- " k = check_if_list(xmltodict.parse(f['schema'][key])['interleave']['optional'])\n",
- " for i in k:\n",
- " schema_dict[key].update(read_element(i['element'], optional=True))\n",
- "\n",
- " template_dict[key] = schema_dict_string_to_template(schema_dict[key], key)\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 10,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "{'current_capacity': {'MaxOccurs': 1, 'ValType': 'Real'},\n",
- " 'in_capacity': {'MaxOccurs': 1, 'MinOccurs': 1, 'ValType': 'Real'},\n",
- " 'in_commod': {'MaxOccurs': 1, 'MinOccurs': 1, 'ValType': 'Real'},\n",
- " 'k_factor_in': {'MaxOccurs': 1, 'MinOccurs': 1, 'ValType': 'Real'},\n",
- " 'k_factor_out': {'MaxOccurs': 1, 'MinOccurs': 1, 'ValType': 'Real'},\n",
- " 'max_inv_size': {'MaxOccurs': 1, 'ValType': 'Real'},\n",
- " 'out_capacity': {'MaxOccurs': 1, 'MinOccurs': 1, 'ValType': 'Real'},\n",
- " 'out_commod': {'MaxOccurs': 1, 'MinOccurs': 1, 'ValType': 'Real'},\n",
- " 'recipe_name': {'MaxOccurs': 1, 'MinOccurs': 1, 'ValType': 'Real'}}\n"
- ]
- },
- {
- "ename": "KeyError",
- "evalue": "':agents:KFacility'",
- "output_type": "error",
- "traceback": [
- "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
- "\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)",
- "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0mpprint\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mschema_dict\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mkey\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtemplate_dict\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mkey\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
- "\u001b[0;31mKeyError\u001b[0m: ':agents:KFacility'"
- ]
- }
- ],
- "source": [
- "pprint.pprint(schema_dict[key])\n",
- "\n",
- "print(template_dict[key])"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 11,
- "metadata": {},
- "outputs": [],
- "source": [
- "def reasonable_linebreak(string, lim=60):\n",
- " nlines = len(string) // lim\n",
- "\n",
- " space_indices = []\n",
- " for i in range(nlines):\n",
- " n = (i+1)*lim\n",
- " space_indices.append(string[n:].find(' ') + n)\n",
- "\n",
- " new_str = ''\n",
- " for indx, val in enumerate(string):\n",
- " if indx not in space_indices:\n",
- " new_str += val\n",
- " else:\n",
- " new_str += val + '\\n'\n",
- "\n",
- " return new_str\n",
- " \n",
- " \n",
- "def schema_dict_string_to_template(d, key):\n",
- " d = {key:d}\n",
- " c = copy.deepcopy(d)\n",
- " s = pprint.pformat(delete_keys_from_dict(c, ['MaxOccurs', 'MinOccurs', 'ValType']))\n",
- " s = s[1:]\n",
- " s = '\\n'.join(s.split('\\n'))\n",
- " s = s[:-1]\n",
- " s = s.replace(\"'\", '')\n",
- " s = s.replace(',', '')\n",
- " s = s.replace(':', '')\n",
- " s = s.replace('\"', '')\n",
- " s = s.split('\\n')\n",
- " n = []\n",
- " for i in s:\n",
- " var = i.strip().split()[0]\n",
- " # print(dd[key])\n",
- " if var == 'streams':\n",
- " var = 'streams_'\n",
- " if var not in schema_dict[key].keys():\n",
- " # multiline variables with weird things\n",
- " print('skipping', var)\n",
- " continue\n",
- " # see if optional\n",
- " \n",
- " if 'MinOccur' not in d[key][var]:\n",
- " optional = '(optional)'\n",
- " else:\n",
- " optional = ''\n",
- " doc = reasonable_linebreak(optional + ' ' +f['annotations'][key]['doc'] ).split('\\n')\n",
- " for j in doc:\n",
- " n.append('%' + j)\n",
- " n.append(i.strip())\n",
- "\n",
- " return '\\n'.join(n)\n",
- " \n",
- "\n",
- "def delete_keys_from_dict(dict_del, lst_keys):\n",
- " for k in lst_keys:\n",
- " try:\n",
- " del dict_del[k]\n",
- " except KeyError:\n",
- " pass\n",
- " for v in dict_del.values():\n",
- " if isinstance(v, dict):\n",
- " delete_keys_from_dict(v, lst_keys)\n",
- "\n",
- " return dict_del\n",
- "\n",
- " \n",
- "def check_if_list(d):\n",
- " # this is because some schemas,\n",
- " # if there's only one entry would have\n",
- " # a dictionary, while if there's multiple\n",
- " # would have a list\n",
- " if 'ict' in str(type(element)):\n",
- " return [d]\n",
- " else:\n",
- " return d"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": 1,
- "metadata": {},
- "outputs": [],
- "source": [
- "from generate_sch import generate_schema\n",
- "import pprint"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 2,
- "metadata": {
- "scrolled": true
- },
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "skipping piecewise_function\n",
- "skipping type\n",
- "skipping start\n",
- "skipping pref\n",
- "skipping info\n",
- "skipping streams_\n",
- "skipping info\n",
- "skipping efficiencies\n",
- "skipping eff\n"
- ]
- }
- ],
- "source": [
- "obj = generate_schema('t')"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 3,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "simulation{\n",
- "\n",
- " control {\n",
- " duration = 1234\n",
- " startmonth = 1\n",
- " startyear = 2020\n",
- " explicit_inventory=0\n",
- " dt=2629846\n",
- " decay=\"lazy\"\n",
- " }\n",
- " \n",
- " archetypes { % This part is automatically filled! No need to worry\n",
- " spec = {lib=\"agents\" name=\"KFacility\"}\n",
- " spec = {lib=\"agents\" name=\"Predator\"}\n",
- " spec = {lib=\"agents\" name=\"Prey\"}\n",
- " spec = {lib=\"agents\" name=\"Sink\"}\n",
- " spec = {lib=\"agents\" name=\"Source\"}\n",
- " spec = {lib=\"cycamore\" name=\"DeployInst\"}\n",
- " spec = {lib=\"cycamore\" name=\"Enrichment\"}\n",
- " spec = {lib=\"cycamore\" name=\"FuelFab\"}\n",
- " spec = {lib=\"cycamore\" name=\"GrowthRegion\"}\n",
- " spec = {lib=\"cycamore\" name=\"ManagerInst\"}\n",
- " spec = {lib=\"cycamore\" name=\"Mixer\"}\n",
- " spec = {lib=\"cycamore\" name=\"Reactor\"}\n",
- " spec = {lib=\"cycamore\" name=\"Separations\"}\n",
- " spec = {lib=\"cycamore\" name=\"Sink\"}\n",
- " spec = {lib=\"cycamore\" name=\"Source\"}\n",
- " spec = {lib=\"cycamore\" name=\"Storage\"}\n",
- "\n",
- " }\n",
- "\n",
- "\n",
- " facility {\n",
- " config {\"autocomplete here\"} \n",
- " }\n",
- " facility {\n",
- " config {\"there can be multiple facilities\"} \n",
- " }\n",
- "\n",
- " \n",
- " region {\n",
- " config {\"autocomplete here\"}\n",
- " }\n",
- " region {\n",
- " config {\"there can be multiple regions\"}\n",
- " }\n",
- "\n",
- " \n",
- " recipe {\n",
- " \"write your recipes here\"\n",
- " }\n",
- " recipe {\n",
- " \"there can be multiple recipes\"\n",
- " }\n",
- "}\n"
- ]
- }
- ],
- "source": [
- "print(obj.init_template)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 4,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "\t\t\t%Reactor is a simple, general reactor based on static compositional \n",
- "\t\t\t%transformations to model fuel burnup. The user specifies \n",
- "\t\t\t%a set of input fuels and corresponding burnt compositions \n",
- "\t\t\t%that fuel is transformed to when it is discharged from the \n",
- "\t\t\t%core. No incremental transmutation takes place. Rather, at \n",
- "\t\t\t%the end of an operational cycle, the batch being discharged \n",
- "\t\t\t%from the core is instantaneously transmuted from its original \n",
- "\t\t\t%fresh fuel composition into its spent fuel form.\n",
- "\t\t\t%\n",
- "\t\t\t%Each fuel \n",
- "\t\t\t%is identified by a specific input commodity and has an associated \n",
- "\t\t\t%input recipe (nuclide composition), output recipe, \n",
- "\t\t\t%output commidity, and preference. The preference identifies \n",
- "\t\t\t%which input fuels are preferred when requesting. Changes in \n",
- "\t\t\t%these preferences can be specified as a function of time using \n",
- "\t\t\t%the pref_change variables. Changes in the input-output \n",
- "\t\t\t%recipe compositions can also be specified as a function of \n",
- "\t\t\t%time using the recipe_change variables.\n",
- "\t\t\t%\n",
- "\t\t\t%The reactor treats \n",
- "\t\t\t%fuel as individual assemblies that are never split, combined \n",
- "\t\t\t%or otherwise treated in any non-discrete way. Fuel is requested \n",
- "\t\t\t%in full-or-nothing assembly sized quanta. If real-world \n",
- "\t\t\t%assembly modeling is unnecessary, parameters can be adjusted \n",
- "\t\t\t%(e.g. n_assem_core, assem_size, n_assem_batch). At the \n",
- "\t\t\t%end of every cycle, a full batch is discharged from the core \n",
- "\t\t\t%consisting of n_assem_batch assemblies of assem_size kg. The \n",
- "\t\t\t%reactor also has a specifiable refueling time period following \n",
- "\t\t\t%the end of each cycle at the end of which it will resume \n",
- "\t\t\t%operation on the next cycle *if* it has enough fuel for a \n",
- "\t\t\t%full core; otherwise it waits until it has enough fresh fuel \n",
- "\t\t\t%assemblies.\n",
- "\t\t\t%\n",
- "\t\t\t%In addition to its core, the reactor has an on-hand \n",
- "\t\t\t%fresh fuel inventory and a spent fuel inventory whose \n",
- "\t\t\t%capacities are specified by n_assem_fresh and n_assem_spent \n",
- "\t\t\t%respectively. Each time step the reactor will attempt to acquire \n",
- "\t\t\t%enough fresh fuel to fill its fresh fuel inventory (and \n",
- "\t\t\t%its core if the core isn't currently full). If the fresh \n",
- "\t\t\t%fuel inventory has zero capacity, fuel will be ordered just-in-time \n",
- "\t\t\t%after the end of each operational cycle before the next \n",
- "\t\t\t%begins. If the spent fuel inventory becomes full, the reactor \n",
- "\t\t\t%will halt operation at the end of the next cycle until \n",
- "\t\t\t%there is more room. Each time step, the reactor will try \n",
- "\t\t\t%to trade away as much of its spent fuel inventory as possible.\n",
- "\t\t\t%\n",
- "\t\t\t%When \n",
- "\t\t\t%the reactor reaches the end of its lifetime, it will \n",
- "\t\t\t%discharge all material from its core and trade away all its \n",
- "\t\t\t%spent fuel as quickly as possible. Full decommissioning will \n",
- "\t\t\t%be delayed until all spent fuel is gone. If the reactor \n",
- "\t\t\t%has a full core when it is decommissioned (i.e. is mid-cycle) \n",
- "\t\t\t%when the reactor is decommissioned, half (rounded up to nearest \n",
- "\t\t\t%int) of its assemblies are transmuted to their respective \n",
- "\t\t\t%burnt compositions.\n",
- "\t\t\t%\n",
- "\t\t\t%(optional) Mass (kg) of a single assembly.\n",
- "\t\t\tassem_size {}\n",
- "\t\t\t%(optional) Number of time steps since the start of the last cycle. \n",
- "\t\t\t%Only set this if you know what you are doing\n",
- "\t\t\tcycle_step {}\n",
- "\t\t\t%(optional) The duration of a full operational cycle (excluding \n",
- "\t\t\t%refueling time) in time steps.\n",
- "\t\t\tcycle_time {}\n",
- "\t\t\t%(optional) If true, the archetype transmutes all assemblies upon \n",
- "\t\t\t%decommissioning If false, the archetype only transmutes \n",
- "\t\t\t%half.\n",
- "\t\t\tdecom_transmute_all {}\n",
- "\t\t\t%(optional) Ordered list of input commodities on which to requesting \n",
- "\t\t\t%fuel.\n",
- "\t\t\tfuel_incommods {val {}}\n",
- "\t\t\t%(optional) Fresh fuel recipes to request for each of the given \n",
- "\t\t\t%fuel input commodities (same order).\n",
- "\t\t\tfuel_inrecipes {val {}}\n",
- "\t\t\t%(optional) Output commodities on which to offer spent fuel originally \n",
- "\t\t\t%received as each particular input commodity (same \n",
- "\t\t\t%order).\n",
- "\t\t\tfuel_outcommods {val {}}\n",
- "\t\t\t%(optional) Spent fuel recipes corresponding to the given fuel \n",
- "\t\t\t%input commodities (same order). Fuel received via a particular \n",
- "\t\t\t%input commodity is transmuted to the recipe specified here \n",
- "\t\t\t%after being burned during a cycle.\n",
- "\t\t\tfuel_outrecipes {val {}}\n",
- "\t\t\t%(optional) The preference for each type of fresh fuel requested \n",
- "\t\t\t%corresponding to each input commodity (same order). If no \n",
- "\t\t\t%preferences are specified, 1.0 is used for all fuel requests \n",
- "\t\t\t%(default).\n",
- "\t\t\tfuel_prefs {val {}}\n",
- "\t\t\t%(optional) Latitude of the agent's geographical position. The \n",
- "\t\t\t%value should be expressed in degrees as a double.\n",
- "\t\t\tlatitude {}\n",
- "\t\t\t%(optional) Longitude of the agent's geographical position. The \n",
- "\t\t\t%value should be expressed in degrees as a double.\n",
- "\t\t\tlongitude {}\n",
- "\t\t\t%(optional) Number of assemblies that constitute a single batch. \n",
- "\t\t\t% This is the number of assemblies discharged from the core \n",
- "\t\t\t%fully burned each cycle.Batch size is equivalent to ``n_assem_batch \n",
- "\t\t\t%/ n_assem_core``.\n",
- "\t\t\tn_assem_batch {}\n",
- "\t\t\t%(optional) Number of assemblies that constitute a full core.\n",
- "\t\t\t%\n",
- "\t\t\tn_assem_core {}\n",
- "\t\t\t%(optional) Number of fresh fuel assemblies to keep on-hand if \n",
- "\t\t\t%possible.\n",
- "\t\t\tn_assem_fresh {}\n",
- "\t\t\t%(optional) Number of spent fuel assemblies that can be stored \n",
- "\t\t\t%on-site before reactor operation stalls.\n",
- "\t\t\tn_assem_spent {}\n",
- "\t\t\t%(optional) Amount of electrical power the facility produces when \n",
- "\t\t\t%operating normally.\n",
- "\t\t\tpower_cap {}\n",
- "\t\t\t%(optional) The name of the 'power' commodity used in conjunction \n",
- "\t\t\t%with a deployment curve.\n",
- "\t\t\tpower_name {}\n",
- "\t\t\t%(optional) The input commodity for a particular fuel preference \n",
- "\t\t\t%change. Same order as and direct correspondence to the specified \n",
- "\t\t\t%preference change times.\n",
- "\t\t\tpref_change_commods {val {}}\n",
- "\t\t\t%(optional) A time step on which to change the request preference \n",
- "\t\t\t%for a particular fresh fuel type.\n",
- "\t\t\tpref_change_times {val {}}\n",
- "\t\t\t%(optional) The new/changed request preference for a particular \n",
- "\t\t\t%fresh fuel. Same order as and direct correspondence to the \n",
- "\t\t\t%specified preference change times.\n",
- "\t\t\tpref_change_values {val {}}\n",
- "\t\t\t%(optional) The input commodity indicating fresh fuel for which \n",
- "\t\t\t%recipes will be changed. Same order as and direct correspondence \n",
- "\t\t\t%to the specified recipe change times.\n",
- "\t\t\trecipe_change_commods {val {}}\n",
- "\t\t\t%(optional) The new input recipe to use for this recipe change. \n",
- "\t\t\t%Same order as and direct correspondence to the specified recipe \n",
- "\t\t\t%change times.\n",
- "\t\t\trecipe_change_in {val {}}\n",
- "\t\t\t%(optional) The new output recipe to use for this recipe change. \n",
- "\t\t\t%Same order as and direct correspondence to the specified \n",
- "\t\t\t%recipe change times.\n",
- "\t\t\trecipe_change_out {val {}}\n",
- "\t\t\t%(optional) A time step on which to change the input-output recipe \n",
- "\t\t\t%pair for a requested fresh fuel.\n",
- "\t\t\trecipe_change_times {val {}}\n",
- "\t\t\t%(optional) The duration of a full refueling period - the minimum \n",
- "\t\t\t%time between the end of a cycle and the start of the next \n",
- "\t\t\t%cycle.\n",
- "\t\t\trefuel_time {}\n",
- "\t\t\t%(optional) Ordered vector of the quantity of side product the \n",
- "\t\t\t%reactor produces with power\n",
- "\t\t\tside_product_quantity {val {}}\n",
- "\t\t\t%(optional) Ordered vector of side product the reactor produces \n",
- "\t\t\t%with power\n",
- "\t\t\tside_products {val {}}\n"
- ]
- }
- ],
- "source": [
- "print(obj.template_dict['Reactor'])"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 5,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "skipping piecewise_function\n",
- "skipping type\n",
- "skipping start\n",
- "skipping pref\n",
- "skipping info\n",
- "skipping streams_\n",
- "skipping info\n",
- "skipping efficiencies\n",
- "skipping eff\n"
- ]
- }
- ],
- "source": [
- "from generate_sch import main\n",
- "main()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 6,
- "metadata": {},
- "outputs": [],
- "source": [
- "! open ~/Downloads/Workbench-Darwin/bin/Workbench"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.7.3"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/neams/templates/NullInst.tmpl b/neams/templates/NullInst.tmpl
deleted file mode 100644
index 008cdc6..0000000
--- a/neams/templates/NullInst.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-NullInst
\ No newline at end of file
diff --git a/neams/templates/NullRegion.tmpl b/neams/templates/NullRegion.tmpl
deleted file mode 100644
index e7968fc..0000000
--- a/neams/templates/NullRegion.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-NullRegion
\ No newline at end of file
diff --git a/neams/workbench_files_generate.ipynb b/neams/workbench_files_generate.ipynb
deleted file mode 100644
index 925f2cb..0000000
--- a/neams/workbench_files_generate.ipynb
+++ /dev/null
@@ -1,436 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": 1,
- "metadata": {},
- "outputs": [],
- "source": [
- "import xmltodict\n",
- "import numpy as np\n",
- "import json\n",
- "import os\n",
- "from pprint import pprint"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 11,
- "metadata": {},
- "outputs": [],
- "source": [
- "def convert_valtype(valtype):\n",
- " # conversion from Cyclus valtype [key]\n",
- " # to Workbench ValType [val]\n",
- " conversion_dict = {'string': 'String',\n",
- " 'nonNegativeInteger': 'Int',\n",
- " 'boolean': 'Int',\n",
- " 'double': 'Real',\n",
- " 'positiveInteger': 'Int',\n",
- " 'float': 'Real',\n",
- " 'double': 'Real',\n",
- " 'duration': 'Int',\n",
- " 'integer': 'Int',\n",
- " 'nonPositiveInteger': 'Int',\n",
- " 'negativeInteger': 'Int',\n",
- " 'long': 'Real',\n",
- " 'int': 'Int',\n",
- " }\n",
- "\n",
- " additional_dict = {'nonNegativeInteger': {'MinValInc': 0},\n",
- " 'positiveInteger': {'MinValExc': 0},\n",
- " 'boolean': {'ValEnums': [0, 1]},\n",
- " 'duration': {'MinValInc': 0},\n",
- " 'nonPositiveInteger': {'MaxValInc': 0},\n",
- " 'negativeInteger': {'MaxValExc': 0},\n",
- " }\n",
- " d = {'valType': conversion_dict[valtype]}\n",
- " if valtype in additional_dict:\n",
- " for key, val in additional_dict[valtype].items():\n",
- " d[key] = val\n",
- " return d\n",
- "\n",
- "def convert_num_limits(limit_type):\n",
- " conversion_dict = {'oneOrMore': {'ChildAtLeastOne': True},\n",
- " 'zeroOrMore': {'MinOccurs': 0}\n",
- " }\n",
- "\n",
- "\n",
- "def change_element(element_dict):\n",
- " d = {element_dict['@name']: convert_valtype(element_dict['data']['@type'])}\n",
- " "
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 15,
- "metadata": {},
- "outputs": [],
- "source": [
- "i = x['grammar']['start']['element']['interleave']['element']\n",
- "ctrl = i[0]\n",
- "recipe = x['grammar']['start']['element']['interleave']['zeroOrMore'][1]\n",
- "\"\"\"\n",
- "def fui_generator(tree):\n",
- " fui_str = ''\n",
- " fui_str += '\"%s\"{\\n' %tree['@name']\n",
- " fui_str += '\\ttype=object\\n'\n",
- " # get essential variables first\n",
- " for entry in tree['interleave']['element']:\n",
- " options = parse_options(entry)\n",
- " fui_str += '\\t%s{\\n\\t\\t%s\\n\\t\\t}\\n' %(entry['@name'], '\\n\\t\\t'.join(options))\n",
- " # optional variables\n",
- " for entry in tree['interleave']['optional']:\n",
- " entry = entry['element']\n",
- " options = parse_options(entry)\n",
- " options.append('need=optional')\n",
- " fui_str += '\\t%s{\\n\\t\\t%s\\n\\t\\t}\\n' %(entry['@name'], '\\n\\t\\t'.join(options))\n",
- " print(fui_str)\n",
- "\"\"\" \n",
- "def convert_valtype(val):\n",
- " if 'string' in val.lower() or 'str' in val.lower():\n",
- " return 'string'\n",
- " else:\n",
- " return 'real'\n",
- "\n",
- "def parse_options(entry_dict):\n",
- " option_list = []\n",
- "\n",
- " if 'data' not in entry_dict:\n",
- " if 'text' in entry_dict.keys():\n",
- " return ['type=string']\n",
- " else:\n",
- " return []\n",
- " for key, val in entry_dict['data'].items():\n",
- " if key == '@type':\n",
- " option_list.append('type=%s' %convert_valtype(val))\n",
- " else:\n",
- " print('what else %s' %key)\n",
- " \n",
- " return option_list\n",
- " "
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 95,
- "metadata": {},
- "outputs": [],
- "source": [
- "###\n",
- "# generate highlight files\n",
- "##\n",
- "##\n",
- "rgb_dict = {'black': [0,0,0],\n",
- " 'white': [255, 255, 255],\n",
- " 'red': [255, 0, 0],\n",
- " 'lime': [0, 255, 0],\n",
- " 'blue': [0, 0, 255],\n",
- " 'yellow': [255, 255, 0],\n",
- " 'cyan': [0, 255, 255],\n",
- " 'magenta': [255, 0, 255],\n",
- " 'silver': [192, 192, 192]}\n",
- "def highlight_maker(name, word, color='blue'):\n",
- " s = \"\"\"rule(\"%s\") {\n",
- " pattern = \"%s\"\n",
- " bold = true\n",
- " foreground {\n",
- " red = %i\n",
- " green = %i\n",
- " blue = %i\n",
- " }\n",
- "}\n",
- " \n",
- " \"\"\" %(name, word, rgb_dict[color][0], rgb_dict[color][1], rgb_dict[color][2])\n",
- " return s\n",
- "highlight_str = ''\n",
- "for i in ['simulation', 'control', 'archetypes',\n",
- " 'facility', 'region', 'recipe']:\n",
- " highlight_str += highlight_maker(i, i)\n",
- "# highlight_str += highlight_maker('brack_open', '{', 'red')\n",
- "# highlight_str += highlight_maker('brack_close', '}', 'red')\n",
- "# highlight_str += highlight_maker('square_open', '[', 'lime')\n",
- "# highlight_str += highlight_maker('square_close', ']', 'lime')\n",
- "highlight_str += '''rule(\"Quoted string\") {\n",
- "pattern = \"\"\"'[^']*'|\"[^\"]*\"\"\"\"\n",
- "bold = true\n",
- "foreground {\n",
- "red = 255\n",
- "green = 130\n",
- "blue = 0\n",
- "}\n",
- "background {\n",
- "red = 255\n",
- "green = 130\n",
- "blue = 0\n",
- "alpha = 25\n",
- "}\n",
- "}\n",
- "\n",
- "rule(\"equal\"){\n",
- "pattern=\"=\"\n",
- "background{\n",
- "red=192\n",
- "green=192\n",
- "blue=192\n",
- "}\n",
- "}\n",
- "\n",
- "rule(\"comment\"){\n",
- "pattern=\"%*\"\n",
- "foreground{\n",
- "red=0\n",
- "green=255\n",
- "blue=0\n",
- "}\n",
- "}\n",
- "'''"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 96,
- "metadata": {},
- "outputs": [],
- "source": [
- "###\n",
- "# generate schema files\n",
- "##\n",
- "## This is manually written because there are so many unnecessary things that are not needed\n",
- "## things like the commodity block are goners\n",
- "sch_str = \"\"\"simulation{\n",
- " Description=\"Agent-based fuel cycle simulator\"\n",
- " InputTmpl=\"init_template\"\n",
- " control {\n",
- " MinOccurs=1\n",
- " Description=\"Defines simulation time and decay methods\"\n",
- " MaxOccurs=1\n",
- " duration={\n",
- " MinOccurs=1\n",
- " MaxOccurs=1\n",
- " Description=\"the number of timesteps in simulation\"\n",
- " ValType=Int\n",
- " }\n",
- " startyear={\n",
- " MinOccurs=1\n",
- " MaxOccurs=1\n",
- " Description=\"the year to start the simulation\"\n",
- " ValType=Int\n",
- " }\n",
- " startmonth={\n",
- " MinOccurs=1\n",
- " MaxOccurs=1\n",
- " Description=\"the month to start the simulation\"\n",
- " ValType=Int\n",
- " ValEnums=[ 1 2 3 4 5 6 7 8 9 10 11 12 ]\n",
- " }\n",
- " decay={\n",
- " MinOccurs=0\n",
- " MaxOccurs=1\n",
- " Description=\"How to model decay in Cyclus\"\n",
- " ValType=String\n",
- " ValEnums=[\"lazy\" \"manual\" \"never\"]\n",
- " }\n",
- " dt={\n",
- " MinOccurs=0\n",
- " MaxOccurs=1\n",
- " ValType=Real\n",
- " Description=\"duration of a single timestep in seconds\"\n",
- " }\n",
- " explicit_inventory={\n",
- " MinOccurs=0\n",
- " MaxOccurs=1\n",
- " ValType=Int\n",
- " ValEnums=[0 1]\n",
- " Description=\"boolean specifying whether or nor to track inventory in each agent\"\n",
- " }\n",
- " \n",
- " }\n",
- "\n",
- " archetypes {\n",
- " MinOccurs=1\n",
- " Description=\"Defines the archetypes used in this simulation\"\n",
- " MaxOccurs=1\n",
- " spec={\n",
- " MinOccurs=1\n",
- " lib={MinOccurs=1\n",
- " MaxOccurs=1\n",
- " ValType=String\n",
- " }\n",
- " name={MinOccurs=1\n",
- " MaxOccurs=1\n",
- " ValType=String\n",
- " }\n",
- " }\n",
- " }\n",
- " \n",
- " facility {\n",
- " Description=\"Facility definition block\"\n",
- " MinOccurs=1\n",
- " %facility_schema\n",
- " }\n",
- " \n",
- " region{\n",
- " Description=\"Region definition block\"\n",
- " MinOccurs=1\n",
- " %region_schema\n",
- " }\n",
- " \n",
- " recipe{\n",
- " Description=\"Recipe definition block\"\n",
- " name={\n",
- " MinOccurs=1\n",
- " MaxOccurs=1\n",
- " ValType=String\n",
- " }\n",
- " basis={\n",
- " MinOccurs=1\n",
- " MaxOccurs=1\n",
- " ValType=String\n",
- " ValEnums=[\"mass\" \"atom\"]\n",
- " }\n",
- " nuclide={\n",
- " MinOccurs=1\n",
- " id={MinOccurs=1 MaxOccurs=1}\n",
- " comp={MinOccurs=1 MaxOccurs=1 ValType=Real}\n",
- " }\n",
- " }\n",
- "\n",
- "}\n",
- "\"\"\"\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 97,
- "metadata": {},
- "outputs": [],
- "source": [
- "###\n",
- "# generate initial template file\n",
- "##\n",
- "##\n",
- "init_template = \"\"\"simulation{\n",
- "\n",
- " control {\n",
- " duration = 1234\n",
- " startmonth = 1\n",
- " startyear = 2020\n",
- " explicit_inventory=0\n",
- " dt=2629846\n",
- " decay=\"lazy\"\n",
- " }\n",
- " \n",
- " archetypes {\n",
- " spec [\"this will be automatically filled\"]\n",
- " }\n",
- "\n",
- " facility {\n",
- " \"write your facilities here\"\n",
- " }\n",
- " \n",
- " region {\n",
- " \"write your regions here\"\n",
- " }\n",
- " \n",
- " recipe {\n",
- " \"write your recipes here\"\n",
- " }\n",
- "}\"\"\"\n",
- "\n",
- "template_dict = {'init_template': init_template}\n",
- "# archetypes spec automatically filled from distribution"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 98,
- "metadata": {},
- "outputs": [],
- "source": [
- "###\n",
- "# generate grammar files\n",
- "##\n",
- "##\n",
- "\n",
- "schema_path = '/Users/4ib/Desktop/git/cyclus_gui/neams/cyclus.sch'\n",
- "template_dir = '/Users/4ib/Desktop/git/cyclus_gui/neams/templates/'\n",
- "highlight_path = '/Users/4ib/Desktop/git/cyclus_gui/neams/cyclus.wbh'\n",
- "\n",
- "grammar_str = \"\"\"name= Cyclus\n",
- "enabled = true\n",
- "\n",
- "parser = waspson\n",
- "schema = \"%s\"\n",
- "validator = wasp\n",
- "\n",
- "templates = \"%s\"\n",
- "\n",
- "highlighter = \"%s\"\n",
- "\n",
- "extensions = [cyclus]\n",
- "\"\"\" %(schema_path, template_dir, highlight_path)\n",
- "\n",
- "\n",
- "grammar_path = '/Users/4ib/.workbench/2.0.0/grammars/cyclus.wbg'\n",
- "\n",
- "with open(grammar_path, 'w') as f:\n",
- " f.write(grammar_str)\n",
- "\n",
- "with open(schema_path, 'w') as f:\n",
- " f.write(sch_str)\n",
- " \n",
- "with open(highlight_path, 'w') as f:\n",
- " f.write(highlight_str)\n",
- "\n",
- "for key, val in template_dict.items():\n",
- " with open(os.path.join(template_dir, key+'.tmpl'), 'w') as f:\n",
- " f.write(val)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 100,
- "metadata": {},
- "outputs": [],
- "source": [
- "! open ~/Downloads/Workbench-Darwin/bin/Workbench"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "1. put the grammar file into .workbench/2.0.0/grammars/*.wbh\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.7.3"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/reqs.txt b/reqs.txt
new file mode 100644
index 0000000..92ff663
--- /dev/null
+++ b/reqs.txt
@@ -0,0 +1,254 @@
+absl-py==0.7.1
+alabaster==0.7.12
+altgraph==0.16.1
+anaconda-client==1.7.2
+anaconda-navigator==1.9.6
+anaconda-project==0.8.2
+appnope==0.1.0
+appscript==1.1.0
+asn1crypto==0.24.0
+astor==0.7.1
+astroid==2.2.5
+astropy==3.1.2
+atomicwrites==1.3.0
+attrs==19.1.0
+Babel==2.6.0
+backcall==0.1.0
+backports.os==0.1.1
+backports.shutil-get-terminal-size==1.0.0
+bcrypt==3.1.7
+beautifulsoup4==4.7.1
+bitarray==0.8.3
+bkcharts==0.2
+blaze==0.11.3
+bleach==3.1.0
+blinker==1.4
+bokeh==1.0.4
+boto==2.49.0
+boto3==1.9.125
+botocore==1.12.125
+Bottleneck==1.2.1
+bz2file==0.98
+certifi==2019.11.28
+cffi==1.12.2
+chardet==3.0.4
+Click==7.0
+cloudpickle==0.8.1
+clyent==1.2.2
+colorama==0.4.1
+conda==4.7.12
+conda-build==3.17.8
+conda-package-handling==1.6.0
+conda-verify==3.1.1
+contextlib2==0.5.5
+cryptography==2.6.1
+cycler==0.10.0
+cyclus-gui==0.1
+Cython==0.29.6
+cytoolz==0.9.0.1
+dask==1.1.5
+datashape==0.5.4
+decorator==4.4.0
+defusedxml==0.5.0
+distributed==1.26.1
+docutils==0.14
+entrypoints==0.3
+et-xmlfile==1.0.1
+fastcache==1.0.2
+filelock==3.0.10
+Flask==1.0.2
+Flask-Cors==3.0.7
+future==0.17.1
+gast==0.2.2
+gensim==3.4.0
+gevent==1.3.7
+glob2==0.6
+gmpy2==2.0.8
+greenlet==0.4.13
+grpcio==1.19.0
+h5py==2.8.0
+heapdict==1.0.0
+html5lib==1.0.1
+idna==2.8
+imageio==2.5.0
+imagesize==1.1.0
+importlib-metadata==0.0.0
+inspyred==1.0.1
+ipykernel==5.1.0
+ipython==7.4.0
+ipython-genutils==0.2.0
+ipywidgets==7.4.2
+isort==4.3.16
+itsdangerous==1.1.0
+jdcal==1.4
+jedi==0.13.3
+Jinja2==2.10
+jmespath==0.9.4
+jsonschema==3.0.1
+jupyter-client==5.2.4
+jupyter-console==6.0.0
+jupyter-core==4.4.0
+jupyterlab==0.35.4
+jupyterlab-server==0.2.0
+Keras==2.2.4
+Keras-Applications==1.0.7
+Keras-Preprocessing==1.0.9
+keyring==19.0.1
+kiwisolver==1.0.1
+lazy-object-proxy==1.3.1
+libarchive-c==2.8
+lief==0.9.0
+llvmlite==0.26.0
+locket==0.2.0
+lxml==4.3.3
+macholib==1.11
+Markdown==3.1
+MarkupSafe==1.1.1
+matplotlib==3.0.3
+mccabe==0.6.1
+mistune==0.8.4
+mkl-fft==1.0.10
+mkl-random==1.0.2
+mock==2.0.0
+more-itertools==4.3.0
+mpmath==1.1.0
+msgpack==0.6.1
+multipledispatch==0.6.0
+navigator-updater==0.2.1
+nbconvert==5.4.1
+nbformat==4.4.0
+networkx==2.2
+nltk==3.2.5
+nose==1.3.7
+notebook==5.7.7
+numba==0.41.0
+numexpr==2.6.9
+numpy==1.16.2
+numpydoc==0.8.0
+oauthlib==3.0.1
+odo==0.5.1
+olefile==0.46
+openpyxl==2.6.1
+packaging==19.0
+pandas==0.24.2
+pandocfilters==1.4.2
+paramiko==2.6.0
+parso==0.3.4
+partd==0.3.9
+path.py==11.5.0
+pathlib2==2.3.3
+patsy==0.5.1
+pbr==5.1.3
+pefile==2018.8.8
+pep8==1.7.1
+pexpect==4.6.0
+phame==0.1
+pickleshare==0.7.5
+Pillow==5.4.1
+pkginfo==1.5.0.1
+pluggy==0.9.0
+ply==3.11
+prometheus-client==0.6.0
+prompt-toolkit==2.0.9
+protobuf==3.7.1
+psutil==5.6.1
+ptyprocess==0.6.0
+py==1.8.0
+py4j==0.10.7
+pycodestyle==2.5.0
+pycosat==0.6.3
+pycparser==2.19
+pycrypto==2.6.1
+pycurl==7.43.0.2
+pyflakes==2.1.1
+Pygments==2.3.1
+PyInstaller==3.4
+PyJWT==1.7.1
+pylint==2.3.1
+PyNaCl==1.3.0
+pyne==0.5.11
+pyodbc==4.0.26
+pyOpenSSL==19.0.0
+pyparsing==2.3.1
+pyrsistent==0.14.11
+PySocks==1.6.8
+pyspark==2.4.5
+pytest==4.4.0
+pytest-arraydiff==0.3
+pytest-astropy==0.5.0
+pytest-doctestplus==0.3.0
+pytest-openfiles==0.3.1
+pytest-remotedata==0.3.1
+python-crfsuite==0.9.6
+python-dateutil==2.8.0
+pytz==2018.9
+PyWavelets==1.0.2
+PyYAML==5.1
+pyzmq==18.0.0
+QtAwesome==0.5.7
+qtconsole==4.4.3
+QtPy==1.7.0
+requests==2.21.0
+requests-oauthlib==1.2.0
+rope==0.10.7
+ruamel-yaml==0.15.71
+s3transfer==0.2.0
+scikit-image==0.14.2
+scikit-learn==0.20.0
+scipy==1.1.0
+seaborn==0.9.0
+Send2Trash==1.5.0
+simplegeneric==0.8.1
+singledispatch==3.4.0.3
+six==1.12.0
+smart-open==1.8.0
+snowballstemmer==1.2.1
+sortedcollections==1.1.2
+sortedcontainers==2.1.0
+soupsieve==1.9
+Sphinx==2.0.0
+sphinxcontrib-applehelp==1.0.1
+sphinxcontrib-devhelp==1.0.1
+sphinxcontrib-htmlhelp==1.0.1
+sphinxcontrib-jsmath==1.0.1
+sphinxcontrib-qthelp==1.0.2
+sphinxcontrib-serializinghtml==1.1.1
+sphinxcontrib-websupport==1.1.0
+spyder==3.3.1
+spyder-kernels==0.4.3
+SQLAlchemy==1.3.1
+statsmodels==0.9.0
+sympy==1.3
+tables==3.4.4
+tblib==1.3.2
+tensorboard==1.13.1
+tensorflow==1.13.1
+tensorflow-estimator==1.13.0
+termcolor==1.1.0
+terminado==0.8.2
+testpath==0.4.2
+textblob==0.15.3
+toolz==0.9.0
+tornado==6.0.2
+tqdm==4.31.1
+traitlets==4.3.2
+tweepy==3.8.0
+twython==3.7.0
+typed-ast==1.3.1
+unicodecsv==0.14.1
+urllib3==1.24.1
+utility-fcns==0.1
+wcwidth==0.1.7
+webencodings==0.5.1
+Werkzeug==0.15.1
+widgetsnbextension==3.4.2
+wordcloud==1.6.0
+wrapt==1.11.1
+wurlitzer==1.0.2
+xlrd==1.2.0
+XlsxWriter==1.1.5
+xlwings==0.15.5
+xlwt==1.3.0
+xmltodict==0.12.0
+zict==0.1.4
+zipp==0.3.3
diff --git a/run.sh b/run.sh
deleted file mode 100755
index e0894da..0000000
--- a/run.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-pip install -r requirements.txt
-python gui.py
\ No newline at end of file
diff --git a/tests/unit_test.py b/tests/unit_test.py
new file mode 100644
index 0000000..a6f4047
--- /dev/null
+++ b/tests/unit_test.py
@@ -0,0 +1,46 @@
+import numpy as np
+import unittest
+from cyclus_gui.gui.sim_window import SimulationWindow
+from cyclus_gui.gui.arche_window import ArchetypeWindow
+from cyclus_gui.gui.proto_window import PrototypeWindow
+from cyclus_gui.gui.region_window import RegionWindow
+from cyclus_gui.gui.recipe_window import RecipeWindow
+from cyclus_gui.gui.backend_window import BackendWindow
+import os
+
+
+def skip_init(cls):
+ actual_init = cls.__init__
+ cls.__init__ = lambda *args, **kwargs: None
+ instance = cls()
+ cls.__init__ = actual_init
+ return instance
+
+class sim_unit_test(unittest.TestCase):
+
+ def test_is_it_pos_integer(self):
+ obj = skip_init(SimulationWindow)
+ q_a_dict = {1:True,
+ 2.1:False,
+ -1:False,
+ 4:True}
+ for key, val in q_a_dict.items():
+ self.assertEqual(obj.is_it_pos_integer(key), val)
+
+
+class arche_unit_test(unittest.TestCase):
+
+ def test_get_metafile_from_git(self):
+ obj = skip_init(ArchetypeWindow)
+ arche = obj.get_metafile_from_git('./meta.json')
+ os.remove('./meta.json')
+ answer = [['agents', 'NullInst'], ['agents', 'NullRegion'], ['cycamore', 'Source'],
+ ['cycamore', 'Sink'], ['cycamore', 'DeployInst'], ['cycamore', 'Enrichment'],
+ ['cycamore', 'FuelFab'], ['cycamore', 'GrowthRegion'], ['cycamore', 'ManagerInst'],
+ ['cycamore', 'Mixer'], ['cycamore', 'Reactor'], ['cycamore', 'Separations'],
+ ['cycamore', 'Storage']]
+ for i in answer:
+ self.assertTrue(i in arche)
+
+if __name__ == '__main__':
+ unittest.main()
\ No newline at end of file
diff --git a/workbench/README.md b/workbench/README.md
new file mode 100644
index 0000000..680cfd8
--- /dev/null
+++ b/workbench/README.md
@@ -0,0 +1,6 @@
+# Cyclus - Workbench Integration
+
+## cyclus_simple.py
+Runtime file. Move this to your rte directory in workbench, and set the paths in the file so that it will find the other file-generating scripts in `.../cyclus_gui/workbench/cyclus`
+
+Clicking on `update and print grammar` in workbench will populate the files needed for the template, autofill, and so on.
\ No newline at end of file
diff --git a/workbench/cyclus/cyclus.wbp b/workbench/cyclus/cyclus.wbp
new file mode 100644
index 0000000..1bb8cdd
--- /dev/null
+++ b/workbench/cyclus/cyclus.wbp
@@ -0,0 +1,66 @@
+% how it's set up:
+% the postrun generates a csv file with a specific keyword in it
+% with BEGIN and END lines
+% that allows the postprocessor to recognize / parse the appropriate values
+
+% material flow agent to agent
+extensions = [csv]
+filter_pattern = "CYCLUS"
+
+processor("Material flow agent to agent") {
+ delimiter = ","
+
+ logic = """${AWK} "/BEGIN trade_flow_agent/,/END trade_flow_agent/" ${CURRENT_FILE}"""
+
+ graph("Material flow agent to agent"){
+ % key axis (x-axis) label
+ key_axis_label = "Timesteps"
+
+ % value axis (y-axis) label
+ value_axis_label = "Mass [kg]"
+
+ % key axis scale (linear, log)
+ key_axis_scale = linear
+
+ % line style (line, stepleft, stepright, stepcenter, impulse, none)
+ line_style = "line"
+
+ % series keys (x-values)
+ keys = "a2:?2" % timesteps
+
+ % series values (y-values)
+ values = "f4:?4"
+
+ }
+
+}
+
+
+processor("Agent flow deployed") {
+ delimiter = ","
+
+ logic = """${AWK} "/BEGIN agent_flow_deployed/,/END agent_flow_deployed/" ${CURRENT_FILE}"""
+
+ graph("Material flow agent to agent"){
+ % key axis (x-axis) label
+ key_axis_label = "Timesteps"
+
+ % value axis (y-axis) label
+ value_axis_label = "Deployed"
+
+ % key axis scale (linear, log)
+ key_axis_scale = linear
+
+ % line style (line, stepleft, stepright, stepcenter, impulse, none)
+ line_style = "line"
+
+ % series keys (x-values)
+ keys = "a2:?2" % timesteps
+
+ % series values (y-values)
+ values = "b4:?4"
+
+ }
+
+}
+
diff --git a/workbench/cyclus/cyclus_processor.py b/workbench/cyclus/cyclus_processor.py
new file mode 100644
index 0000000..7336908
--- /dev/null
+++ b/workbench/cyclus/cyclus_processor.py
@@ -0,0 +1,338 @@
+import numpy as np
+import os
+import shutil
+import json
+import copy
+import matplotlib.pyplot as plt
+import sqlite3 as lite
+from argparse import ArgumentParser, FileType, Namespace, SUPPRESS
+import sys
+here = os.path.abspath(os.path.dirname(__file__))
+sys.path.append(os.path.join(here, os.pardir, 'util'))
+from processor import load_environment, BinnedData, Sheet, Options, Processor
+
+
+class CyclusPostrunner:
+ def __init__(self, sqlite_path):
+ #!
+ self.get_cursor(sqlite_path)
+ self.get_times()
+ self.get_id_proto_dict()
+ self.el_z_dict = {'H': 1, 'He': 2, 'Li': 3, 'Be': 4, 'B': 5, 'C': 6, 'N': 7, 'O': 8, 'F': 9, 'Ne': 10, 'Na': 11, 'Mg': 12, 'Al': 13, 'Si': 14, 'P': 15, 'S': 16, 'Cl': 17, 'Ar': 18, 'K': 19, 'Ca': 20, 'Sc': 21, 'Ti': 22, 'V': 23, 'Cr': 24, 'Mn': 25, 'Fe': 26, 'Co': 27, 'Ni': 28, 'Cu': 29, 'Zn': 30, 'Ga': 31, 'Ge': 32, 'As': 33, 'Se': 34, 'Br': 35, 'Kr': 36, 'Rb': 37, 'Sr': 38, 'Y': 39, 'Zr': 40, 'Nb': 41, 'Mo': 42, 'Tc': 43, 'Ru': 44, 'Rh': 45, 'Pd': 46, 'Ag': 47, 'Cd': 48, 'In': 49, 'Sn': 50, 'Sb': 51, 'Te': 52, 'I': 53, 'Xe': 54, 'Cs': 55, 'Ba': 56, 'La': 57, 'Ce': 58, 'Pr': 59, 'Nd': 60, 'Pm': 61, 'Sm': 62, 'Eu': 63, 'Gd': 64, 'Tb': 65, 'Dy': 66, 'Ho': 67, 'Er': 68, 'Tm': 69, 'Yb': 70, 'Lu': 71, 'Hf': 72, 'Ta': 73, 'W': 74, 'Re': 75, 'Os': 76, 'Ir': 77, 'Pt': 78, 'Au': 79, 'Hg': 80, 'Tl': 81, 'Pb': 82, 'Bi': 83, 'Po': 84, 'At': 85, 'Rn': 86, 'Fr': 87, 'Ra': 88, 'Ac': 89, 'Th': 90, 'Pa': 91, 'U': 92, 'Np': 93, 'Pu': 94, 'Am': 95, 'Cm': 96, 'Bk': 97, 'Cf': 98, 'Es': 99, 'Fm': 100, 'Md': 101, 'No': 102, 'Lr': 103}
+ self.z_el_dict = {v:k for k, v in self.el_z_dict.items()}
+ self.csv_string = 'CYCLUS\n'
+ self.generate_trade_flow('prototype')
+ self.generate_trade_flow('agent')
+ self.generate_commodity_flow()
+ self.generate_agent_flow()
+ self.generate_timeseries_flow()
+
+
+ with open(sqlite_path.replace('.sqlite', '.csv'), 'w') as f:
+ f.write(self.csv_string)
+
+ ###########################
+ # Auxiliary functions
+ ###########################
+ def nucid_convert(self, nucid):
+ e = int(nucid) // 10000
+ a = e % 1000
+ z = e // 1000
+ name = self.z_el_dict[z]
+ if self.config_dict['nuc_notation'].get() == 'ZZAAA':
+ return str(z) + str(a)
+ else:
+ return name + str(a)
+
+
+ def timestep_to_date(self, timestep):
+ timestep = np.array(timestep)
+ month = self.init_month + (timestep * (self.dt / 2629846))
+ year = self.init_year + month//12
+ month = month%12
+ dates = [x+(y/12) for x, y in zip(year, month)]
+ return dates
+
+
+ def get_cursor(self, sqlite_path):
+ con = lite.connect(sqlite_path)
+ con.row_factory = lite.Row
+ self.cur = con.cursor()
+
+
+ def get_times(self):
+ i = self.cur.execute('SELECT * FROM info').fetchone()
+ self.init_year = i['InitialYear']
+ self.init_month = i['InitialMonth']
+ self.duration = i['Duration']
+ i = self.cur.execute('SELECT * FROM TimeStepDur').fetchone()
+ self.dt = i['DurationSecs']
+
+
+ def get_id_proto_dict(self):
+        # returns dictionary mapping agentid -> prototype, for Facility agents only
+ agentids = self.cur.execute('SELECT agentid, prototype, kind FROM agententry').fetchall()
+ self.id_proto_dict = {agent['agentid']:agent['prototype'] for agent in agentids if agent['kind']=='Facility'}
+
+
+
+ def get_iso_flow_dict(self, where_phrase, n_isos, time_col_name='Time'):
+ q = self.cur.execute('SELECT time, quantity, resources.qualid, nucid, sum(quantity*massfrac) FROM transactions INNER JOIN resources ON transactions.resourceid = resources.resourceid INNER JOIN compositions on compositions.qualid = resources.qualid WHERE %s GROUP BY nucid, time' %where_phrase).fetchall()
+ uniq_ = self.cur.execute('SELECT DISTINCT(nucid) FROM transactions INNER JOIN resources ON resources.resourceid = transactions.resourceid INNER JOIN compositions ON compositions.qualid = resources.qualid WHERE %s' %where_phrase).fetchall()
+ timeseries_dict = {q['nucid']:np.zeros(self.duration) for q in uniq_}
+
+ for row in q:
+ timeseries_dict[row['nucid']][row['time']] = row['sum(quantity*massfrac)']
+ keys = sorted(timeseries_dict.keys(), key=lambda i:sum(timeseries_dict[i]), reverse=True)[:n_isos]
+ x = np.arange(self.duration)
+ return x, {self.nucid_convert(k):v for k,v in timeseries_dict.items() if k in keys}
+
+
+ def query_result_to_timeseries(self, query_result, col_name,
+ time_col_name='time'):
+ x = np.arange(self.duration)
+ y = np.zeros(self.duration)
+ for i in query_result:
+ y[int(i[time_col_name])] += i[col_name]
+ return x, y
+
+
+ def query_result_to_dict(self, query_result, vary_col_name, val_col,
+ time_col_name='time'):
+ x = np.arange(self.duration)
+ y = {}
+ keys = list(set([q[vary_col_name] for q in query_result]))
+ for i in keys:
+ y[i] = np.zeros(self.duration)
+ for i in query_result:
+ y[i[vary_col_name]] += i[val_col]
+ y1 = {k:np.mean(v) for k, v in y.items()}
+ n = int(self.config_dict['n_isos'].get())
+ keys = sorted(y1, key=y1.__getitem__, reverse=True)[:n]
+ new_y = {k:v for k, v in y.items() if k in keys}
+
+
+ ###########################
+ ###########################
+ # Generation functions
+ ###########################
+
+
+ def generate_trade_flow(self, groupby):
+ traders = self.cur.execute('SELECT DISTINCT senderid, receiverid, commodity FROM transactions').fetchall()
+ table_dict = {'sender':[], 'receiver': [], 'commodity': []}
+ if groupby == 'agent':
+ table_dict['sender'] = [self.id_proto_dict[i['senderid']] + '(%s)' %str(i['senderid']) for i in traders]
+ table_dict['receiver'] = [self.id_proto_dict[i['receiverid']] + '(%s)' %str(i['receiverid']) for i in traders]
+ table_dict['commodity'] = [i['commodity'] for i in traders]
+ elif groupby == 'prototype':
+ already = []
+ for i in traders:
+ checker = [self.id_proto_dict[i['senderid']], self.id_proto_dict[i['receiverid']], i['commodity']]
+ if checker in already:
+ continue
+ else:
+ already.append(checker)
+ table_dict['sender'].append(checker[0])
+ table_dict['receiver'].append(checker[1])
+ table_dict['commodity'].append(checker[2])
+
+ # should generate a csv with all possible combinations?
+ # format is the following:
+ # first row: keyword
+ # second row: x values (space separated)
+ # sender, receiver, commodity, y (space separated)
+ self.csv_string += 'BEGIN trade_flow_%s' %groupby + '\n'
+ for indx, val in enumerate(table_dict['sender']):
+ s, r, c = table_dict['sender'][indx], table_dict['receiver'][indx], table_dict['commodity'][indx]
+ x, y = self.get_trade_flow(s, r, c, groupby=groupby)
+ if indx != 0:
+ if (prev_x != x).all():
+ raise ValueError('The x values are not the same!')
+ else:
+ self.csv_string += ','.join([str(q) for q in x]) + '\n'
+ if groupby == 'prototype':
+ self.csv_string += 'sender -> [commodity] -> receiver, trade_mass\n'
+ else:
+ self.csv_string += 'sender (senderid) -> [commodity] -> receiver (receiverid), trade_mass\n'
+
+ if groupby == 'prototype':
+ self.csv_string += '%s -> [%s] -> %s' %(s,c,r) + ',' + ','.join([str(q) for q in y]) + '\n'
+ else: #groupby == 'agent'
+ sender_name = s[:s.index('(')]
+ receiver_name = r[:r.index('(')]
+ sender_id = s[s.index('(')+1:s.index(')')]
+ receiver_id = r[r.index('(')+1:r.index(')')]
+ label = '%s (%s) -> [%s] -> %s (%s)' %(sender_name, sender_id, c, receiver_name, receiver_id)
+ self.csv_string += label + ',' + ','.join([str(q) for q in y]) + '\n'
+
+ prev_x = x
+
+ self.csv_string += 'END trade_flow_%s\n' %groupby
+
+
+ def generate_commodity_flow(self):
+ commods = self.cur.execute('SELECT DISTINCT commodity FROM transactions').fetchall()
+ self.csv_string += 'BEGIN commodity_flow\n'
+ for indx, i in enumerate(commods):
+ commod = i['commodity']
+ x, y = self.get_commodity_flow(commod)
+ if indx != 0:
+ if (prev_x != x).all():
+ raise ValueError('The x values are not the same')
+ else:
+ self.csv_string += ','.join([str(q) for q in x]) + '\n'
+ self.csv_string += 'commodity, trade_mass\n'
+
+ self.csv_string += commod + ',' + ','.join([str(q) for q in y])+'\n'
+ prev_x = x
+
+ self.csv_string += 'END commodity_flow\n'
+
+
+ def generate_agent_flow(self):
+ entry = self.cur.execute('SELECT DISTINCT prototype FROM agententry WHERE kind="Facility"').fetchall()
+ for which in ['entered', 'exited', 'deployed']:
+ self.csv_string += 'BEGIN agent_flow_%s\n' %which
+ for indx, i in enumerate(entry):
+ proto = i['prototype']
+ x, y = self.get_agent_flow(proto, which)
+ if indx != 0:
+ if (prev_x != x).all():
+ raise ValueError('The x values are not the same')
+ else:
+ self.csv_string += ','.join([str(q) for q in x]) + '\n'
+ self.csv_string += 'prototype, %s\n' %which
+
+ self.csv_string += proto + ',' + ','.join([str(q) for q in y]) + '\n'
+ prev_x = x
+
+ self.csv_string += 'END agent_flow_%s\n' %which
+
+
+
+ def generate_timeseries_flow(self):
+ tables = self.cur.execute('SELECT name FROM sqlite_master WHERE type="table"').fetchall()
+ timeseries_tables_list = [i['name'].replace('TimeSeries', '') for i in tables if 'TimeSeries' in i['name']]
+ timeseries_tables_list.sort()
+
+ self.csv_string += 'BEGIN timeseries_flow\n'
+ self.csv_string += ','.join([str(q) for q in np.arange(self.duration)]) + '\n'
+ self.csv_string += 'Timeseries (agentname (agentid)), timeseries_value\n'
+ for timeseries in timeseries_tables_list:
+ agentid_list = self.cur.execute('SELECT distinct agentid FROM TimeSeries%s' %timeseries).fetchall()
+ agentid_list = [i['agentid'] for i in agentid_list]
+ agentname_list = [self.id_proto_dict[i] for i in agentid_list]
+ for indx, val in enumerate(agentid_list):
+ x, y = self.get_timeseries_flow(timeseries, val)
+ self.csv_string += '%s (%s (%s))' %(timeseries, agentname_list[indx], val) + ',' + ','.join([str(q) for q in y]) + '\n'
+ x, y = self.get_timeseries_flow(timeseries, 'agg')
+ self.csv_string += '%s (total)' %timeseries + ',' + ','.join([str(q) for q in y]) + '\n'
+ self.csv_string += 'END timeseries_flow\n'
+
+
+
+ def generate_inventory_flow(self, groupby):
+ isit = self.cur.execute('SELECT * FROM InfoExplicitInv').fetchone()
+ if not isit['RecordInventory']:
+ raise ValueError('This simulation was run without `explicit_inventory` turned on.')
+
+ if groupby == 'agent':
+ # get list of agents
+ self.id_proto_dict
+
+ else:
+            # get list of prototypes
+ entry = self.cur.execute('SELECT DISTINCT prototype FROM agententry WHERE kind="Facility"').fetchall()
+ proto_list = [i['prototype'] for i in entry]
+ proto_list.sort(key=str.lower)
+
+
+
+
+
+ ###########################
+ ###########################
+    # Getter functions (query helpers used by the generators above)
+ ###########################
+
+ def get_trade_flow(self, sender, receiver, commodity, groupby):
+ n_isos = 0
+
+ if groupby == 'prototype':
+ receiver_id = [k for k,v in self.id_proto_dict.items() if v == receiver]
+ sender_id = [k for k,v in self.id_proto_dict.items() if v == sender]
+ else:
+ sender_name = sender[:sender.index('(')]
+ receiver_name = receiver[:receiver.index('(')]
+ sender_id = [sender[sender.index('(')+1:sender.index(')')]]
+ receiver_id = [receiver[receiver.index('(')+1:receiver.index(')')]]
+
+ str_sender_id = [str(q) for q in sender_id]
+ str_receiver_id = [str(q) for q in receiver_id]
+ if n_isos == 0:
+ query = 'SELECT sum(quantity), time FROM transactions INNER JOIN resources ON transactions.resourceid == resources.resourceid WHERE (senderid = ' + ' OR senderid = '.join(str_sender_id) + ') AND (receiverid = ' + ' OR receiverid = '.join(str_receiver_id) + ') GROUP BY time'
+ q = self.cur.execute(query).fetchall()
+ x, y = self.query_result_to_timeseries(q, 'sum(quantity)')
+ else:
+ x, y = self.get_iso_flow_dict('(senderid = ' + ' OR senderid = '.join(str_sender_id) + ') AND (receiverid = ' + ' OR receiverid = '.join(str_receiver_id) + ')', n_isos)
+
+ return x, y
+
+
+ def get_commodity_flow(self, commod):
+ n_isos = 0
+ if n_isos == 0:
+ movement = self.cur.execute('SELECT time, sum(quantity) FROM transactions INNER JOIN resources on transactions.resourceid==resources.resourceid WHERE commodity="%s" GROUP BY time' %commod).fetchall()
+ x, y = self.query_result_to_timeseries(movement, 'sum(quantity)')
+ else:
+ x, y = self.get_iso_flow_dict('commodity = "%s"' %commod, n_isos)
+ return x, y
+
+
+ def get_agent_flow(self, prototype, which):
+ entry = self.cur.execute('SELECT agentid, entertime FROM agententry WHERE prototype="%s"' %prototype).fetchall()
+ agent_id_list = [i['agentid'] for i in entry]
+ entertime = [i['entertime'] for i in entry]
+ exittime = []
+ for i in agent_id_list:
+ try:
+ exit = self.cur.execute('SELECT agentid, exittime FROM agentexit WHERE agentid=%s' %str(i)).fetchone()
+ except:
+ continue
+ if exit == None:
+ exittime.append(-1)
+ else:
+ exittime.append(exit['exittime'])
+
+ x = np.array(list(range(self.duration)))
+ y = []
+ if which == 'entered':
+ for time in x:
+ y.append(entertime.count(time))
+ elif which == 'exited':
+ for time in x:
+ y.append(exittime.count(time))
+ elif which == 'deployed':
+ deployed = 0
+ for time in x:
+ deployed += entertime.count(time)
+ deployed -= exittime.count(time)
+ y.append(deployed)
+
+ return x, y
+
+
+ def get_timeseries_flow(self, timeseries, agentid):
+ # agentid_list_q = self.cur.execute('SELECT distinct agentid FROM TimeSeries%s' %timeseries).fetchall()
+ # agentid_list = [i['agentid'] for i in agentid_list_q]
+ # agentname_list = [self.id_proto_dict[i] for i in agentid_list]
+ if agentid == 'agg':
+ series_q = self.cur.execute('SELECT time, sum(value) FROM Timeseries%s GROUP BY time' %timeseries).fetchall()
+ else:
+ series_q = self.cur.execute('SELECT time, sum(value) FROM Timeseries%s WHERE agentid=%s GROUP BY time' %(timeseries, str(agentid))).fetchall()
+ x, y = self.query_result_to_timeseries(series_q, 'sum(value)')
+ return x, y
+
+ def get_inventory_flow(self):
+ z=0
diff --git a/neams/generate_sch.py b/workbench/cyclus/generate_cyclus_sch.py
similarity index 88%
rename from neams/generate_sch.py
rename to workbench/cyclus/generate_cyclus_sch.py
index 9b7483f..2c9a5a6 100644
--- a/neams/generate_sch.py
+++ b/workbench/cyclus/generate_cyclus_sch.py
@@ -1,4 +1,4 @@
-import xmltodict
+#import xmltodict
import copy
import numpy as np
import json
@@ -6,7 +6,10 @@
import os
import subprocess
import pprint
-
+import sys
+here = os.path.dirname(os.path.abspath(__file__))
+sys.path.append(os.path.join(here, os.pardir, os.pardir, 'wasppy'))
+from xml2obj import xml2obj
class highlighter:
@@ -88,7 +91,7 @@ def make_basic_son(self):
return highlight_str
class generate_schema:
- def __init__(self, cyclus_cmd):
+ def __init__(self, cyclus_cmd, metadata_path='/Users/4ib/Desktop/git/cyclus_gui/neams/m.json'):
self.cyclus_cmd = cyclus_cmd
self.conversion_dict = {'string': 'String',
'nonNegativeInteger': 'Int',
@@ -290,12 +293,20 @@ def __init__(self, cyclus_cmd):
}
institution {
name="inst_name"
+ initialfacilitylist {entry={number=1
+ prototype=proto
+ }
+ }
config{
% define institution here
}
}
institution {
name="inst_name"
+ initialfacilitylist {entry={number=1
+ prototype=proto
+ }
+ }
config{
% define institution here
}
@@ -322,9 +333,12 @@ def get_cyclus_files(self):
# this is where everything happens
# temporary !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # meta_str = subprocess.run([self.cyclus_cmd, '-m'], stdout=subprocess.PIPE, shell=True).stdout.decode('utf-8')
- # self.meta_dict = json.loads(meta_str)
- self.meta_dict = json.loads(open('m.json').read())
+ p = subprocess.Popen([self.cyclus_cmd, '-m'], stdout=subprocess.PIPE)
+ meta_str = p.stdout.read()
+ self.meta_dict = json.loads(meta_str)
+ #heredir = os.path.abspath(os.path.dirname(__file__))
+ #self.meta_dict = json.loads(open(os.path.join(heredir, 'm.json')).read())
+
# temporary !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
archetypes = self.meta_dict['specs']
@@ -336,18 +350,18 @@ def get_cyclus_files(self):
for arche in archetypes:
name = arche.split(':')[-1]
self.type_dict[name] = self.meta_dict['annotations'][arche]['entity']
- self.schema_dict[name] = {'InputTmpl': '"%s"' %name}
+ self.schema_dict[name] = {'InputTmpl': '"%s"' %name.encode('ascii')}
if 'NullRegion' in arche or 'NullInst' in arche:
self.template_dict[name] = name+'= null'
continue
- d = xmltodict.parse(self.meta_dict['schema'][arche])['interleave']
+ d = dict(xml2obj(self.meta_dict['schema'][arche])._attrs)
+ #d = xmltodict.parse(self.meta_dict['schema'][arche])['interleave']
k = self.check_if_list(d['element'])
for i in k:
- self.schema_dict[name].update(self.read_element(i))
+ self.schema_dict[name].update(self.read_element(dict(i._attrs)))
k = self.check_if_list(d['optional'])
for i in k:
- self.schema_dict[name].update(self.read_element(i['element'], optional=True))
-
+ self.schema_dict[name].update(self.read_element(dict(i.element._attrs), optional=True))
if self.type_dict[name] == 'facility':
tab = ' ' * 16
elif self.type_dict[name] == 'region':
@@ -396,7 +410,7 @@ def get_cyclus_files(self):
def read_element(self, eld, from_one_or_more=False, optional=False):
if 'interleave' in eld.keys():
- s = self.read_interleave(eld['interleave'], eld['@name'], from_one_or_more, optional)
+ s = self.read_interleave(eld['interleave'], eld['name'], from_one_or_more, optional)
return s
# now there's optional and non-optional
@@ -408,19 +422,17 @@ def read_element(self, eld, from_one_or_more=False, optional=False):
if not optional:
options['MinOccurs'] = 1
- s = {eld['@name']: options}
- www = np.random.uniform(0, 10)
+ s = {eld['name'].encode('ascii'): options}
if 'oneOrMore' in keys:
-
# s = {eld['@name']: {}}
- s[eld['@name']].update(self.read_element(eld['oneOrMore']['element'],
- from_one_or_more=True)
+ s[eld['name']].update(self.read_element(dict(eld['oneOrMore']['element']._attrs),
+ from_one_or_more=True)
)
return s
if 'data' in keys:
- options['ValType'] = self.conversion_dict[eld['data']['@type']]
- s[eld['@name']] = options
+ options['ValType'] = self.conversion_dict[eld['data']['type']].encode('ascii')
+ s[eld['name']] = options
return s
@@ -470,10 +482,7 @@ def schema_dict_string_to_template(self, d, key, tab='\t'):
s = s[1:]
s = '\n'.join(s.split('\n'))
s = s[:-1]
- s = s.replace("'", '')
- s = s.replace(',', '')
- s = s.replace(':', '')
- s = s.replace('"', '')
+ s = s.replace("'", '').replace(',', '').replace(':', '').replace('"', '')
s = s.split('\n')
n = self.reasonable_linebreak(self.meta_dict['annotations'][key]['doc']).split('\n') + ['']
n = ['%'+w for w in n]
@@ -573,17 +582,14 @@ def read_interleave(self, intd, name, from_one_or_more, optional):
options = {}
if not from_one_or_more:
options['MaxOccurs'] = 1
- d = {name: options}
+ d = {name.encode('ascii'): options}
for i in intd['element']:
- d[name].update(self.read_element(i))
+ d[name].update(self.read_element(dict(i._attrs)))
return d
-
-
-
-def main(schema_path='/Users/4ib/Desktop/git/cyclus_gui/neams/cyclus.sch',
+def generate_cyclus_workbench_files(schema_path='/Users/4ib/Desktop/git/cyclus_gui/neams/cyclus.sch',
template_dir='/Users/4ib/Desktop/git/cyclus_gui/neams/templates/',
highlight_path='/Users/4ib/Desktop/git/cyclus_gui/neams/cyclus.wbh',
grammar_path='/Users/4ib/.workbench/2.0.0/grammars/cyclus.wbg',
@@ -623,7 +629,25 @@ def main(schema_path='/Users/4ib/Desktop/git/cyclus_gui/neams/cyclus.sch',
with open(highlight_path, 'w') as f:
f.write(h_.highlight_str)
+def clean_xml(s):  # NOTE(review): the quoted XML-tag literals below look lost/garbled (empty strings) — recover them from the original source before relying on this
+ new = []
+ indx = 0
+ lines = s.split('\n')
+ while indx < len(lines):
+ line = lines[indx]
+ if '' in line:  # NOTE(review): '' in line is always True — presumably a tag literal was stripped here; confirm
+ line = line.replace('', '').replace(r'', '').replace('\n', '').strip()  # NOTE(review): replace('', '') is a no-op as written
+ if line == 'null':
+ line = ''
+ closing = new[-1].strip().replace('<',r'')  # NOTE(review): appears meant to build a closing '</tag>' from the previous opening tag
+ new[-1] = new[-1].replace('\n', '') + line + closing  # fold value and closing tag onto the previous line
+ indx += 1
+ else:
+ new.append(line)
+ indx += 1
+ return '\n'.join(new)
+
-if __name__ == '__main__':
- main()
\ No newline at end of file
+#if __name__ == '__main__':
+# generate_cyclus_workbench_files()
\ No newline at end of file
diff --git a/neams/m.json b/workbench/cyclus/m.json
similarity index 100%
rename from neams/m.json
rename to workbench/cyclus/m.json
diff --git a/neams/cyclus_remote.py b/workbench/cyclus_remote.py
similarity index 100%
rename from neams/cyclus_remote.py
rename to workbench/cyclus_remote.py
diff --git a/workbench/cyclus_simple.py b/workbench/cyclus_simple.py
new file mode 100644
index 0000000..9042875
--- /dev/null
+++ b/workbench/cyclus_simple.py
@@ -0,0 +1,145 @@
+#!/usr/bin/python
+import os
+import sys
+import json
+# super import
+import workbench
+import subprocess
+here = os.path.dirname(os.path.abspath(__file__))
+sys.path.append(os.path.join(here, 'cyclus'))
+import generate_cyclus_sch
+from cyclus_processor import CyclusPostrunner
+
+
+class CyclusRuntimeEnvironment(workbench.WorkbenchRuntimeEnvironment):
+ """Cyclus-specific Workbench runtime environment (SON -> XML -> cyclus run -> postprocess)."""
+ def __init__(self):
+ """constructor"""
+ # call super class constructor
+ super(CyclusRuntimeEnvironment, self).__init__()
+ self.executable = 'cyclus'  # default; may be overridden via the -e command-line option
+
+
+ def app_name(self):
+ """returns the app's self-designated name"""
+ return "cyclus"
+
+
+ def app_options(self):
+ """list of app-specific options"""
+ opts = []  # none yet — cyclus needs no extra Workbench options
+ return opts
+
+
+ def environment(self):
+ # this is here to see if it works.. why does it not work?
+ return {}  # no extra environment variables for the child process
+
+
+ def run_args(self, options):  # builds the cyclus CLI arg list: -i <xml input> -o <sqlite output>
+ args = []
+ args.append('-i')
+ args.append(os.path.join(self.temp_xml_path))  # set by prerun()
+ args.append('-o')
+ pre, ext = os.path.splitext(options.input)
+ self.outpath = os.path.join(options.output_directory, pre + '.sqlite')
+ if os.path.exists(self.outpath):  # warn: cyclus appends a new SimId rather than overwriting
+ self.echo(1, '#!!!!! Previous output file with name %s exists.' %self.outpath)
+ self.echo(1, '#!!!!! NOTE THAT Cyclus does not delete and create a new Sqlite file.')
+ self.echo(1, '#!!!!! It simply adds a new SimId.')
+ self.echo(1, '#!!!!! So you will have one file with multiple simulation results.')
+ args.append(self.outpath)
+ return args
+
+
+ def prerun(self, options):  # converts the SON input to JSON (sonjson) then to cyclus XML
+ # convert son into xml
+ binpath = os.path.join(here, os.pardir, 'bin')
+ sonjson_path = os.path.join(binpath, 'sonjson')  # bundled SON->JSON converter binary
+ self.echo(1, "#### Converting SON to XML... ")
+ schema_file_path = os.path.join(here, os.pardir,
+ 'cyclus', 'cyclus.sch')
+ p = subprocess.Popen([sonjson_path, schema_file_path, options.input],
+ stdout=subprocess.PIPE)
+ json_str = p.stdout.read()  # NOTE(review): bytes on Python 3, written below to a text-mode file — confirm target interpreter
+ temp_json_path = os.path.join(self.working_directory, 'temp.json')
+ with open(temp_json_path, 'w') as f:
+ f.write(json_str)
+ p = subprocess.Popen([self.executable, '--json-to-xml', temp_json_path],
+ stdout=subprocess.PIPE)
+ xml_str = generate_cyclus_sch.clean_xml(p.stdout.read())  # cyclus itself does the JSON->XML step
+ pre, ext = os.path.splitext(options.input)
+ self.temp_xml_path = os.path.join(self.working_directory, pre+'.xml')  # consumed by run_args()
+ with open(self.temp_xml_path, 'w') as f:
+ f.write(xml_str)
+ self.echo(1, "#### Finished converting to XML!")
+
+
+
+ def postrun(self, options):
+ """actions to perform after the run finishes"""
+ # here, we are going to try to get that sqlite to be a csv file
+ self.echo(1, '#### Postrunner on %s' %self.outpath)
+ CyclusPostrunner(self.outpath)  # side effects only; instance is discarded
+ self.echo(1, '#### Finished Postrunner')
+
+
+
+ def update_and_print_grammar(self, grammar_path):  # regenerates schema/template/highlight/grammar files from `cyclus -m`
+ if self.executable == None:  # NOTE(review): PEP 8 — prefer 'is None' (here and below)
+ import argparse
+ # if the -grammar flag appears earlier in the arg list than the -e, it won't have been set
+ # so, we must parse the argv for that case
+ parser_for_grammar = argparse.ArgumentParser()
+ parser_for_grammar.add_argument("-e", type=str)
+ known, unknown = parser_for_grammar.parse_known_args(sys.argv)
+ self.executable = known.e
+
+ if self.executable == None:
+ sys.stderr.write("***Error: The -grammar option requires -e argument!\n")
+ sys.exit(1)
+
+ """Checks the provided grammar file and determines if it is out of date
+ and if so, updates it accordingly"""
+ cyclus_bin_dir = os.path.dirname(self.executable)
+ cyclus_dir = os.path.dirname(cyclus_bin_dir)  # NOTE(review): both locals are unused below
+
+
+ #! technically here the grammar file would be generated from cyclus -m
+ #! from the user defined executable
+ # this whole thing is taken care:
+ # import generate_sch
+ # define paths to schema // template // highlight // grammar files
+ workbench_basedir = os.path.join(os.path.abspath(self.rte_dir), os.pardir)
+
+ workbench_cyclus_dir = os.path.join(workbench_basedir, 'cyclus')
+ if not os.path.isdir(workbench_cyclus_dir):
+ os.mkdir(workbench_cyclus_dir)
+ self.schema_file_path = os.path.join(workbench_cyclus_dir, 'cyclus.sch')
+ # create cyclus template folder if it does not exist:
+ self.template_dir_path = os.path.join(workbench_basedir, 'etc', 'Templates', 'cyclus')
+ if not os.path.isdir(self.template_dir_path):
+ os.mkdir(self.template_dir_path)  # NOTE(review): os.mkdir fails if 'etc/Templates' itself is missing — os.makedirs would be safer
+ self.highlight_file_path = os.path.join(workbench_basedir, 'etc', 'grammars', 'highlighters', 'cyclus.wbh')
+ self.grammar_file_path = os.path.join(workbench_basedir, 'etc', 'grammars', 'cyclus.wbg')
+ generate_cyclus_sch.generate_cyclus_workbench_files(schema_path=self.schema_file_path,
+ template_dir=self.template_dir_path,
+ highlight_path=self.highlight_file_path,
+ grammar_path=self.grammar_file_path,
+ cyclus_cmd=self.executable)
+ return
+
+
+ def get_grammar_additional_resources(self, grammar_file_path):
+ """Returns a list of filepaths that need included which are not normally included"""
+ if grammar_file_path is None:
+ raise ValueError("The provided path to the grammar file is null")
+ if grammar_file_path == "":
+ raise ValueError("The provided path to the grammar file is empty")
+ return []  # no extra resources beyond the defaults
+
+
+
+
+if __name__ == "__main__":
+ CyclusRuntimeEnvironment().execute(sys.argv[1:])  # delegate CLI parsing and the run lifecycle to the Workbench base class
diff --git a/neams/recycle.json b/workbench/examples/recycle.json
similarity index 100%
rename from neams/recycle.json
rename to workbench/examples/recycle.json
diff --git a/neams/recycle.son b/workbench/examples/recycle.son
similarity index 99%
rename from neams/recycle.son
rename to workbench/examples/recycle.son
index d613704..419d870 100644
--- a/neams/recycle.son
+++ b/workbench/examples/recycle.son
@@ -66,4 +66,7 @@ simulation{
}
}
}
-
\ No newline at end of file
+
+
+
+
diff --git a/neams/recycle.xml b/workbench/examples/recycle.xml
similarity index 100%
rename from neams/recycle.xml
rename to workbench/examples/recycle.xml
diff --git a/workbench/examples/son_input.son b/workbench/examples/son_input.son
new file mode 100644
index 0000000..ad241c9
--- /dev/null
+++ b/workbench/examples/son_input.son
@@ -0,0 +1,144 @@
+simulation{
+
+ control {
+ duration = 50
+ startmonth = 1
+ startyear = 2020
+ explicit_inventory=0
+ dt=2629846
+ decay="lazy"
+ }
+
+ archetypes { % This part is automatically filled! No need to worry
+ spec = {lib="agents" name="NullRegion"}
+ spec = {lib="agents" name="NullInst"}
+ spec = {lib="cycamore" name="Sink"}
+ spec = {lib="cycamore" name="Source"}
+ spec = {lib="cycamore" name="Storage"}
+ }
+
+
+ facility {
+ name="sink"
+ config {
+% A sink facility that accepts materials and products with a fixed
+%
+%throughput (per time step) capacity and a lifetime capacity
+%defined by
+% a total inventory size. The inventory size
+%and throughput capacity
+% both default to infinite. If a recipe
+%is provided, it will request
+% material with that recipe.
+%Requests are made for any number of
+% specified commodities
+%.
+%
+%
+Sink {
+ %(optional) [double] capacity the sink facility can accept at
+ %each time step
+ capacity =1e+299
+
+ %(optional) [[u'std::vector', u'double']] preferences for each
+ %of the given commodities, in the same order.Defauts to 1 if
+ %unspecified
+ in_commod_prefs {val = 1} % The value can be multiple values of val
+
+ % [[u'std::vector', u'std::string']] commodities that the sink
+ %facility accepts
+ in_commods {val = "in_sink"}
+
+ %(optional) [double] Latitude of the agent's geographical position.
+ %The value should be expressed in degrees as a double.
+ %
+ latitude =0.0
+
+ %(optional) [double] Longitude of the agent's geographical position.
+ %The value should be expressed in degrees as a double
+ %.
+ longitude =0.0
+
+ %(optional) [double] total maximum inventory size of sink fac
+ %ility
+ max_inv_size =1e+299
+}
+ % autocomplete here
+ }
+ }
+ facility {
+ name="source"
+ config {
+%This facility acts as a source of material with a fixed throughput
+%(per
+%time step) capacity and a lifetime capacity defined
+%by a total inventory
+%size. It offers its material as a single
+%commodity. If a composition
+%recipe is specified, it provides
+%that single material composition to
+%requesters. If unspecified,
+%the source provides materials with the exact
+%requested
+%compositions. The inventory size and throughput both
+%default to
+%infinite. Supplies material results in corresponding
+%decrease in
+%inventory, and when the inventory size reaches
+%zero, the source can provide
+%no more material.
+%
+%
+Source {
+ %(optional) [double] Total amount of material this source has
+ %remaining. Every trade decreases this value by the supplied
+ %material quantity. When it reaches zero, the source cannot
+ %provide any more material.
+ inventory_size =1e+299
+
+ %(optional) [double] Latitude of the agent's geographical position.
+ %The value should be expressed in degrees as a double.
+ %
+ latitude =0.0
+
+ %(optional) [double] Longitude of the agent's geographical position.
+ %The value should be expressed in degrees as a double
+ %.
+ longitude =0.0
+
+ % [std::string] Output commodity on which the source offers m
+ %aterial.
+ outcommod = "source_out"
+
+ %(optional) [double] amount of commodity that can be supplied
+ %at each time step
+ throughput =1e+299
+
+}
+ % there can be multiple facilities
+ }
+ }
+
+
+ region {
+ name="region_name"
+ config {
+NullRegion= null
+ % autocomplete here
+ }
+ institution {
+ name="inst"
+ initialfacilitylist {entry = {number=1
+ prototype=sink
+ }
+ entry = {number=1
+ prototype=source
+ }
+ }
+ config{NullInst= null}
+
+ }
+ }
+
+
+ }
\ No newline at end of file
diff --git a/workbench/m.json b/workbench/m.json
new file mode 100644
index 0000000..4ebb46d
--- /dev/null
+++ b/workbench/m.json
@@ -0,0 +1,2062 @@
+{
+ "annotations": {
+ ":agents:KFacility": {
+ "all_parents": [
+ "cyclus::Agent",
+ "cyclus::Facility",
+ "cyclus::Ider",
+ "cyclus::StateWrangler",
+ "cyclus::TimeListener",
+ "cyclus::Trader"
+ ],
+ "doc": "A facility designed for integration tests that both provides and consumes commodities. It changes its request and offer amounts based on a power law with respect to time.",
+ "entity": "facility",
+ "name": "cyclus::KFacility",
+ "parents": ["cyclus::Facility"],
+ "vars": {
+ "current_capacity": {
+ "alias": "current_capacity",
+ "default": 0,
+ "doc": "number of output commodity units that can be supplied at the current time step (infinite capacity can be represented by a very large number )",
+ "index": 5,
+ "shape": [-1],
+ "tooltip": "current output capacity",
+ "type": "double",
+ "uilabel": "Current Capacity"
+ },
+ "in_capacity": {
+ "alias": "in_capacity",
+ "doc": "number of commodity units that can be taken at each timestep (infinite capacity can be represented by a very large number)",
+ "index": 3,
+ "shape": [-1],
+ "tooltip": "input commodity capacity",
+ "type": "double",
+ "uilabel": "Incoming Throughput"
+ },
+ "in_commod": {
+ "alias": "in_commod",
+ "doc": "commodity that the k-facility consumes",
+ "index": 0,
+ "schematype": "token",
+ "shape": [-1],
+ "tooltip": "input commodity",
+ "type": "std::string",
+ "uilabel": "Input Commodity",
+ "uitype": "incommodity"
+ },
+ "inventory": {
+ "alias": "inventory",
+ "capacity": "max_inv_size",
+ "index": 7,
+ "shape": [-1],
+ "tooltip": "inventory",
+ "type": "cyclus::toolkit::ResourceBuff",
+ "uilabel": "inventory"
+ },
+ "k_factor_in": {
+ "alias": "k_factor_in",
+ "doc": "conversion factor that governs the behavior of the k-facility's input commodity capacity",
+ "index": 8,
+ "shape": [-1],
+ "tooltip": "input k-factor",
+ "type": "double",
+ "uilabel": "Input K-Factor"
+ },
+ "k_factor_out": {
+ "alias": "k_factor_out",
+ "doc": "conversion factor that governs the behavior of the k-facility's output commodity capacity",
+ "index": 9,
+ "shape": [-1],
+ "tooltip": "output k-factor",
+ "type": "double",
+ "uilabel": "Output K-Factor"
+ },
+ "max_inv_size": {
+ "alias": "max_inv_size",
+ "default": 1.000000000000000e+299,
+ "doc": "total maximum inventory size of the k-facility",
+ "index": 6,
+ "shape": [-1],
+ "tooltip": "k-facility maximum inventory size",
+ "type": "double",
+ "uilabel": "Maximum Inventory"
+ },
+ "out_capacity": {
+ "alias": "out_capacity",
+ "doc": "number of commodity units that can be supplied at each timestep (infinite capacity can be represented by a very large number)",
+ "index": 4,
+ "shape": [-1],
+ "tooltip": "output commodity capacity",
+ "type": "double",
+ "uilabel": "Outgoing Throughput"
+ },
+ "out_commod": {
+ "alias": "out_commod",
+ "doc": "commodity that the k-facility supplies",
+ "index": 2,
+ "schematype": "token",
+ "shape": [-1],
+ "tooltip": "output commodity",
+ "type": "std::string",
+ "uilabel": "Output Commodity",
+ "uitype": "outcommodity"
+ },
+ "recipe_name": {
+ "alias": "recipe_name",
+ "doc": "recipe name for the k-facility's in-commodity",
+ "index": 1,
+ "schematype": "token",
+ "shape": [50],
+ "tooltip": "in-commodity recipe name",
+ "type": "std::string",
+ "uilabel": "Input Recipe",
+ "uitype": "recipe"
+ }
+ }
+ },
+ ":agents:NullInst": {
+ "all_parents": [
+ "cyclus::Agent",
+ "cyclus::Ider",
+ "cyclus::Institution",
+ "cyclus::StateWrangler",
+ "cyclus::TimeListener"
+ ],
+ "doc": "An instition that owns facilities in the simulation but exhibits null behavior. No parameters are given when using the null institution.",
+ "entity": "institution",
+ "name": "cyclus::NullInst",
+ "parents": ["cyclus::Institution"],
+ "vars": {}
+ },
+ ":agents:NullRegion": {
+ "all_parents": [
+ "cyclus::Agent",
+ "cyclus::Ider",
+ "cyclus::Region",
+ "cyclus::StateWrangler",
+ "cyclus::TimeListener"
+ ],
+ "doc": "A region that owns the simulation's institutions but exhibits null behavior. No parameters are given when using the null region.",
+ "entity": "region",
+ "name": "cyclus::NullRegion",
+ "parents": ["cyclus::Region"],
+ "vars": {}
+ },
+ ":agents:Predator": {
+ "all_parents": [
+ "cyclus::Agent",
+ "cyclus::Facility",
+ "cyclus::Ider",
+ "cyclus::StateWrangler",
+ "cyclus::TimeListener",
+ "cyclus::Trader"
+ ],
+ "doc": "A facility that represents predators in the Lotka-Volterra integration tests",
+ "entity": "facility",
+ "name": "cyclus::Predator",
+ "parents": ["cyclus::Facility"],
+ "vars": {
+ "age": {
+ "alias": "age",
+ "default": 0,
+ "doc": "age of predator at beginning of simulation",
+ "index": 8,
+ "shape": [-1],
+ "tooltip": "predator age",
+ "type": "int",
+ "uilabel": "Predator Age"
+ },
+ "birth_and_death": {
+ "alias": "birth_and_death",
+ "default": 0,
+ "doc": "whether or not simultaneous birth and death are allowed (i.e., can a facility give birth and die in the same time step?)",
+ "index": 12,
+ "shape": [-1],
+ "tooltip": "simultaneous birth and death?",
+ "type": "bool",
+ "uilabel": "Simultaneous Birth and Death?"
+ },
+ "commod": {
+ "alias": "commod",
+ "doc": "commodity that the predator supplies",
+ "index": 0,
+ "schematype": "token",
+ "shape": [-1],
+ "tooltip": "predator commodity",
+ "type": "std::string",
+ "uilabel": "Predator Commodity",
+ "uitype": "outcommodity"
+ },
+ "consumed": {
+ "alias": "consumed",
+ "default": 0,
+ "doc": "how many units of prey consumed per time step",
+ "index": 6,
+ "shape": [-1],
+ "tooltip": "prey consumed",
+ "type": "double",
+ "uilabel": "Prey Consumed"
+ },
+ "dead": {
+ "alias": "dead",
+ "default": 0,
+ "doc": "flag for whether predator is currently dead",
+ "index": 10,
+ "shape": [-1],
+ "tooltip": "dead?",
+ "type": "bool",
+ "uilabel": "Predator Dead?"
+ },
+ "full": {
+ "alias": "full",
+ "default": 1,
+ "doc": "how many units of prey a predator consumes until it is satisfied",
+ "index": 2,
+ "shape": [-1],
+ "tooltip": "feast size",
+ "type": "double",
+ "uilabel": "Feast Size"
+ },
+ "hunt_cap": {
+ "alias": "hunt_cap",
+ "default": 1,
+ "doc": "how many units of prey a predator can catch during a hunt",
+ "index": 4,
+ "shape": [-1],
+ "tooltip": "hunting yield",
+ "type": "double",
+ "uilabel": "Hunting Yield"
+ },
+ "hunt_factor": {
+ "alias": "hunt_factor",
+ "default": 0,
+ "doc": "whether or not to base hunting success on relative predator/prey populations",
+ "index": 7,
+ "shape": [-1],
+ "tooltip": "hunting success factor",
+ "type": "bool",
+ "uilabel": "Hunting Success Factor"
+ },
+ "hunt_freq": {
+ "alias": "hunt_freq",
+ "default": 1,
+ "doc": "how often a predator needs to hunt",
+ "index": 3,
+ "shape": [-1],
+ "tooltip": "hunting frequency",
+ "type": "int",
+ "uilabel": "Hunting Frequency"
+ },
+ "lifespan": {
+ "alias": "lifespan",
+ "default": 1,
+ "doc": "how long a predator lives",
+ "index": 9,
+ "shape": [-1],
+ "tooltip": "predator lifespan",
+ "type": "int",
+ "uilabel": "Predator Lifespan"
+ },
+ "nchildren": {
+ "alias": "nchildren",
+ "default": 1,
+ "doc": "number of predator children born at each birthing instance",
+ "index": 11,
+ "shape": [-1],
+ "tooltip": "number of children",
+ "type": "double",
+ "uilabel": "Number Predator Children"
+ },
+ "prey": {
+ "alias": "prey",
+ "doc": "prey that the predator hunts",
+ "index": 1,
+ "schematype": "token",
+ "shape": [-1],
+ "tooltip": "predator's prey",
+ "type": "std::string",
+ "uilabel": "Prey Commodity",
+ "uitype": "incommodity"
+ },
+ "success": {
+ "alias": "success",
+ "default": 1,
+ "doc": "fraction of hunting success on a scale from 0 to 1",
+ "index": 5,
+ "shape": [-1],
+ "tooltip": "hunting success fraction",
+ "type": "double",
+ "uilabel": "Hunting Success Fraction"
+ }
+ }
+ },
+ ":agents:Prey": {
+ "all_parents": [
+ "cyclus::Agent",
+ "cyclus::Facility",
+ "cyclus::Ider",
+ "cyclus::StateWrangler",
+ "cyclus::TimeListener",
+ "cyclus::Trader"
+ ],
+ "doc": "A facility that represents prey in the Lotka-Volterra integration tests",
+ "entity": "facility",
+ "name": "cyclus::Prey",
+ "parents": ["cyclus::Facility"],
+ "vars": {
+ "age": {
+ "alias": "age",
+ "default": 0,
+ "doc": "age of prey at start of simulation",
+ "index": 1,
+ "shape": [-1],
+ "tooltip": "prey age",
+ "type": "int",
+ "uilabel": "Prey Age"
+ },
+ "birth_and_death": {
+ "alias": "birth_and_death",
+ "default": 1,
+ "doc": "whether or not simultaneous birth and death are allowed (i.e., can a facility give birth and die in the same time step?)",
+ "index": 5,
+ "shape": [-1],
+ "tooltip": "simultaneous birth and death?",
+ "type": "bool",
+ "uilabel": "Simultaneous Birth and Death?"
+ },
+ "birth_freq": {
+ "alias": "birth_freq",
+ "default": 1,
+ "doc": "number of time steps between birth of children",
+ "index": 4,
+ "shape": [-1],
+ "tooltip": "birth frequency",
+ "type": "int",
+ "uilabel": "Birth Frequency"
+ },
+ "commod": {
+ "alias": "commod",
+ "index": 0,
+ "schematype": "token",
+ "shape": [-1],
+ "tooltip": "commod",
+ "type": "std::string",
+ "uilabel": "Prey Commodity",
+ "uitype": "outcommodity"
+ },
+ "dead": {
+ "alias": "dead",
+ "default": 0,
+ "doc": "flag for whether prey is currently dead",
+ "index": 2,
+ "shape": [-1],
+ "tooltip": "dead?",
+ "type": "bool",
+ "uilabel": "Dead?"
+ },
+ "nchildren": {
+ "alias": "nchildren",
+ "default": 1,
+ "doc": "number of children born at each birthing instance",
+ "index": 3,
+ "shape": [-1],
+ "tooltip": "number of children",
+ "type": "int",
+ "uilabel": "Number of Children"
+ }
+ }
+ },
+ ":agents:Sink": {
+ "all_parents": [
+ "cyclus::Agent",
+ "cyclus::Facility",
+ "cyclus::Ider",
+ "cyclus::StateWrangler",
+ "cyclus::TimeListener",
+ "cyclus::Trader"
+ ],
+ "doc": "A minimum implementation sink facility that accepts specified amounts of commodities from other agents",
+ "entity": "facility",
+ "name": "cyclus::Sink",
+ "parents": ["cyclus::Facility"],
+ "vars": {
+ "capacity": {
+ "alias": "capacity",
+ "doc": "capacity the sink facility can accept at each time step",
+ "index": 3,
+ "range": [0.0, 1.000000000000000e+299],
+ "shape": [-1],
+ "tooltip": "sink capacity",
+ "type": "double",
+ "uilabel": "Maximum Throughput",
+ "uitype": "range"
+ },
+ "in_commods": {
+ "alias": ["in_commods", "val"],
+ "doc": "commodities that the sink facility accepts ",
+ "index": 0,
+ "shape": [-1, -1],
+ "tooltip": ["input commodities for the sink", ""],
+ "type": ["std::vector", "std::string"],
+ "uilabel": ["List of Input Commodities", ""],
+ "uitype": ["oneormore", "incommodity"]
+ },
+ "inventory": {
+ "alias": "inventory",
+ "capacity": "max_inv_size",
+ "index": 4,
+ "shape": [-1],
+ "tooltip": "inventory",
+ "type": "cyclus::toolkit::ResourceBuff",
+ "uilabel": "inventory"
+ },
+ "max_inv_size": {
+ "alias": "max_inv_size",
+ "default": 1.000000000000000e+299,
+ "doc": "total maximum inventory size of sink facility",
+ "index": 2,
+ "shape": [-1],
+ "tooltip": "sink maximum inventory size",
+ "type": "double",
+ "uilabel": "Maximum Inventory"
+ },
+ "recipe_name": {
+ "alias": "recipe_name",
+ "default": "",
+ "doc": "Name of recipe to request.If empty, sink requests material no particular composition.",
+ "index": 1,
+ "shape": [-1],
+ "tooltip": "input/request recipe name",
+ "type": "std::string",
+ "uilabel": "Input Recipe",
+ "uitype": "recipe"
+ }
+ }
+ },
+ ":agents:Source": {
+ "all_parents": [
+ "cyclus::Agent",
+ "cyclus::Facility",
+ "cyclus::Ider",
+ "cyclus::StateWrangler",
+ "cyclus::TimeListener",
+ "cyclus::Trader"
+ ],
+ "doc": "A minimum implementation source facility that provides a commodity with a given capacity",
+ "entity": "facility",
+ "name": "cyclus::Source",
+ "parents": ["cyclus::Facility"],
+ "vars": {
+ "capacity": {
+ "alias": "capacity",
+ "doc": "amount of commodity that can be supplied at each time step",
+ "index": 2,
+ "range": [0.0, 1.000000000000000e+299],
+ "shape": [-1],
+ "tooltip": "source capacity",
+ "type": "double",
+ "uilabel": "Maximum Throughput",
+ "uitype": "range"
+ },
+ "commod": {
+ "alias": "commod",
+ "doc": "commodity that the source facility supplies",
+ "index": 0,
+ "schematype": "token",
+ "shape": [-1],
+ "tooltip": "source commodity",
+ "type": "std::string",
+ "uilabel": "Commodity",
+ "uitype": "outcommodity"
+ },
+ "recipe_name": {
+ "alias": "recipe_name",
+ "default": "",
+ "doc": "Recipe name for source facility's commodity.If empty, source supplies material with requested compositions.",
+ "index": 1,
+ "schematype": "token",
+ "shape": [-1],
+ "tooltip": "commodity recipe name",
+ "type": "std::string",
+ "uilabel": "Recipe",
+ "uitype": "recipe"
+ }
+ }
+ },
+ ":cycamore:DeployInst": {
+ "all_parents": [
+ "cyclus::Agent",
+ "cyclus::Ider",
+ "cyclus::Institution",
+ "cyclus::StateWrangler",
+ "cyclus::TimeListener",
+ "cyclus::toolkit::Position"
+ ],
+ "doc": "Builds and manages agents (facilities) according to a manually specified deployment schedule. Deployed agents are automatically decommissioned at the end of their lifetime. The user specifies a list of prototypes for each and corresponding build times, number to build, and (optionally) lifetimes. The same prototype can be specified multiple times with any combination of the same or different build times, build number, and lifetimes. ",
+ "entity": "institution",
+ "name": "cycamore::DeployInst",
+ "parents": ["cyclus::Institution", "cyclus::toolkit::Position"],
+ "vars": {
+ "build_times": {
+ "alias": ["build_times", "val"],
+ "doc": "Time step on which to deploy agents given in prototype list (same order).",
+ "index": 1,
+ "shape": [-1, -1],
+ "tooltip": ["build_times", ""],
+ "type": ["std::vector", "int"],
+ "uilabel": ["Deployment times", ""]
+ },
+ "latitude": {
+ "alias": "latitude",
+ "default": 0.0,
+ "doc": "Latitude of the agent's geographical position. The value should be expressed in degrees as a double.",
+ "index": 4,
+ "shape": [-1],
+ "tooltip": "latitude",
+ "type": "double",
+ "uilabel": "Geographical latitude in degrees as a double"
+ },
+ "lifetimes": {
+ "alias": ["lifetimes", "val"],
+ "default": [],
+ "doc": "Lifetimes for each prototype in prototype list (same order). These lifetimes override the lifetimes in the original prototype definition. If unspecified, lifetimes from the original prototype definitions are used. Although a new prototype is created in the Prototypes table for each lifetime with the suffix '_life_[lifetime]', all deployed agents themselves will have the same original prototype name (and so will the Agents tables).",
+ "index": 3,
+ "shape": [-1, -1],
+ "tooltip": ["lifetimes", ""],
+ "type": ["std::vector", "int"],
+ "uilabel": ["Lifetimes", ""]
+ },
+ "longitude": {
+ "alias": "longitude",
+ "default": 0.0,
+ "doc": "Longitude of the agent's geographical position. The value should be expressed in degrees as a double.",
+ "index": 5,
+ "shape": [-1],
+ "tooltip": "longitude",
+ "type": "double",
+ "uilabel": "Geographical longitude in degrees as a double"
+ },
+ "n_build": {
+ "alias": ["n_build", "val"],
+ "doc": "Number of each prototype given in prototype list that should be deployed (same order).",
+ "index": 2,
+ "shape": [-1, -1],
+ "tooltip": ["n_build", ""],
+ "type": ["std::vector", "int"],
+ "uilabel": ["Number to deploy", ""]
+ },
+ "prototypes": {
+ "alias": ["prototypes", "val"],
+ "doc": "Ordered list of prototypes to build.",
+ "index": 0,
+ "shape": [-1, -1],
+ "tooltip": ["prototypes", ""],
+ "type": ["std::vector", "std::string"],
+ "uilabel": ["Prototypes to deploy", ""],
+ "uitype": ["oneormore", "prototype"]
+ }
+ }
+ },
+ ":cycamore:Enrichment": {
+ "all_parents": [
+ "cyclus::Agent",
+ "cyclus::Facility",
+ "cyclus::Ider",
+ "cyclus::StateWrangler",
+ "cyclus::TimeListener",
+ "cyclus::Trader",
+ "cyclus::toolkit::Position"
+ ],
+ "doc": "The Enrichment facility is a simple agent that enriches natural uranium in a Cyclus simulation. It does not explicitly compute the physical enrichment process, rather it calculates the SWU required to convert an source uranium recipe (i.e. natural uranium) into a requested enriched recipe (i.e. 4% enriched uranium), given the natural uranium inventory constraint and its SWU capacity constraint.\n\nThe Enrichment facility requests an input commodity and associated recipe whose quantity is its remaining inventory capacity. All facilities trading the same input commodity (even with different recipes) will offer materials for trade. The Enrichment facility accepts any input materials with enrichments less than its tails assay, as long as some U235 is present, and preference increases with U235 content. If no U235 is present in the offered material, the trade preference is set to -1 and the material is not accepted. Any material components other than U235 and U238 are sent directly to the tails buffer.\n\nThe Enrichment facility will bid on any request for its output commodity up to the maximum allowed enrichment (if not specified, default is 100%) It bids on either the request quantity, or the maximum quanity allowed by its SWU constraint or natural uranium inventory, whichever is lower. If multiple output commodities with different enrichment levels are requested and the facility does not have the SWU or quantity capacity to meet all requests, the requests are fully, then partially filled in unspecified but repeatable order.\n\nAccumulated tails inventory is offered for trading as a specifiable output commodity.",
+ "entity": "facility",
+ "name": "cycamore::Enrichment",
+ "niche": "enrichment facility",
+ "parents": ["cyclus::Facility", "cyclus::toolkit::Position"],
+ "vars": {
+ "feed_commod": {
+ "alias": "feed_commod",
+ "doc": "feed commodity that the enrichment facility accepts",
+ "index": 0,
+ "shape": [-1],
+ "tooltip": "feed commodity",
+ "type": "std::string",
+ "uilabel": "Feed Commodity",
+ "uitype": "incommodity"
+ },
+ "feed_recipe": {
+ "alias": "feed_recipe",
+ "doc": "recipe for enrichment facility feed commodity",
+ "index": 1,
+ "shape": [-1],
+ "tooltip": "feed recipe",
+ "type": "std::string",
+ "uilabel": "Feed Recipe",
+ "uitype": "inrecipe"
+ },
+ "initial_feed": {
+ "alias": "initial_feed",
+ "default": 0,
+ "doc": "amount of natural uranium stored at the enrichment facility at the beginning of the simulation (kg)",
+ "index": 5,
+ "shape": [-1],
+ "tooltip": "initial uranium reserves (kg)",
+ "type": "double",
+ "uilabel": "Initial Feed Inventory"
+ },
+ "inventory": {
+ "capacity": "max_feed_inventory",
+ "index": 10,
+ "shape": [-1, -1],
+ "type": ["cyclus::toolkit::ResBuf", "cyclus::Material"]
+ },
+ "latitude": {
+ "alias": "latitude",
+ "default": 0.0,
+ "doc": "Latitude of the agent's geographical position. The value should be expressed in degrees as a double.",
+ "index": 12,
+ "shape": [-1],
+ "tooltip": "latitude",
+ "type": "double",
+ "uilabel": "Geographical latitude in degrees as a double"
+ },
+ "longitude": {
+ "alias": "longitude",
+ "default": 0.0,
+ "doc": "Longitude of the agent's geographical position. The value should be expressed in degrees as a double.",
+ "index": 13,
+ "shape": [-1],
+ "tooltip": "longitude",
+ "type": "double",
+ "uilabel": "Geographical longitude in degrees as a double"
+ },
+ "max_enrich": {
+ "alias": "max_enrich",
+ "default": 1.0,
+ "doc": "maximum allowed weight fraction of U235 in product",
+ "index": 7,
+ "range": [0.0, 1.0],
+ "schema": " 0 1 ",
+ "shape": [-1],
+ "tooltip": "maximum allowed enrichment fraction",
+ "type": "double",
+ "uilabel": "Maximum Allowed Enrichment",
+ "uitype": "range"
+ },
+ "max_feed_inventory": {
+ "alias": "max_feed_inventory",
+ "default": 1.000000000000000e+299,
+ "doc": "maximum total inventory of natural uranium in the enrichment facility (kg)",
+ "index": 6,
+ "range": [0.0, 1.000000000000000e+299],
+ "shape": [-1],
+ "tooltip": "max inventory of feed material (kg)",
+ "type": "double",
+ "uilabel": "Maximum Feed Inventory",
+ "uitype": "range"
+ },
+ "order_prefs": {
+ "alias": "order_prefs",
+ "default": 1,
+ "doc": "turn on preference ordering for input material so that EF chooses higher U235 content first",
+ "index": 8,
+ "shape": [-1],
+ "tooltip": "Rank Material Requests by U235 Content",
+ "type": "bool",
+ "uilabel": "Prefer feed with higher U235 content",
+ "userlevel": 10
+ },
+ "product_commod": {
+ "alias": "product_commod",
+ "doc": "product commodity that the enrichment facility generates",
+ "index": 2,
+ "shape": [-1],
+ "tooltip": "product commodity",
+ "type": "std::string",
+ "uilabel": "Product Commodity",
+ "uitype": "outcommodity"
+ },
+ "swu_capacity": {
+ "alias": "swu_capacity",
+ "default": 1.000000000000000e+299,
+ "doc": "separative work unit (SWU) capacity of enrichment facility (kgSWU/timestep) ",
+ "index": 9,
+ "range": [0.0, 1.000000000000000e+299],
+ "shape": [-1],
+ "tooltip": "SWU capacity (kgSWU/month)",
+ "type": "double",
+ "uilabel": "SWU Capacity",
+ "uitype": "range"
+ },
+ "tails": {
+ "index": 11,
+ "shape": [-1, -1],
+ "type": ["cyclus::toolkit::ResBuf", "cyclus::Material"]
+ },
+ "tails_assay": {
+ "alias": "tails_assay",
+ "default": 0.0030,
+ "doc": "tails assay from the enrichment process",
+ "index": 4,
+ "range": [0.0, 0.0030],
+ "shape": [-1],
+ "tooltip": "tails assay",
+ "type": "double",
+ "uilabel": "Tails Assay",
+ "uitype": "range"
+ },
+ "tails_commod": {
+ "alias": "tails_commod",
+ "doc": "tails commodity supplied by enrichment facility",
+ "index": 3,
+ "shape": [-1],
+ "tooltip": "tails commodity",
+ "type": "std::string",
+ "uilabel": "Tails Commodity",
+ "uitype": "outcommodity"
+ }
+ }
+ },
+ ":cycamore:FuelFab": {
+ "all_parents": [
+ "cyclus::Agent",
+ "cyclus::Facility",
+ "cyclus::Ider",
+ "cyclus::StateWrangler",
+ "cyclus::TimeListener",
+ "cyclus::Trader",
+ "cyclus::toolkit::Position"
+ ],
+ "doc": "FuelFab takes in 2 streams of material and mixes them in ratios in order to supply material that matches some neutronics properties of reqeusted material. It uses an equivalence type method [1] inspired by a similar approach in the COSI fuel cycle simulator.\n\nThe FuelFab has 3 input inventories: fissile stream, filler stream, and an optional top-up inventory. All materials received into each inventory are always combined into a single material (i.e. a single fissile material, a single filler material, etc.). The input streams and requested fuel composition are each assigned weights based on summing:\n\n N * (p_i - p_U238) / (p_Pu239 - p_U238)\n\nfor each nuclide where:\n\n - p = nu*sigma_f - sigma_a for the nuclide\n - p_U238 is p for pure U238\n - p_Pu239 is p for pure Pu239\n - N is the nuclide's atom fraction\n - nu is the average # neutrons per fission\n - sigma_f is the microscopic fission cross-section\n - sigma_a is the microscopic neutron absorption cross-section\n\nThe cross sections are from the simple cross section library in PyNE. They can be set to either a thermal or fast neutron spectrum. A linear interpolation is performed using the weights of the fissile, filler, and target streams. The interpolation is used to compute a mixing ratio of the input streams that matches the target weight. In the event that the target weight is higher than the fissile stream weight, the FuelFab will attempt to use the top-up and fissile input streams together instead of the fissile and filler streams. All supplied material will always have the same weight as the requested material.\n\nThe supplying of mixed material is constrained by available inventory quantities and a per time step throughput limit. Requests for fuel material larger than the throughput can never be met. Fissile inventory can be requested/received via one or more commodities. The DRE request preference for each of these commodities can also optionally be specified. 
By default, the top-up inventory size is zero, and it is not used for mixing. \n\n[1] Baker, A. R., and R. W. Ross. \"Comparison of the value of plutonium and uranium isotopes in fast reactors.\" Proceedings of the Conference on Breeding. Economics, and Safety in Large Fast Power Reactors. 1963.",
+ "entity": "facility",
+ "name": "cycamore::FuelFab",
+ "niche": "fabrication",
+ "parents": ["cyclus::Facility", "cyclus::toolkit::Position"],
+ "vars": {
+ "fill": {
+ "capacity": "fill_size",
+ "index": 4,
+ "shape": [-1, -1],
+ "type": ["cyclus::toolkit::ResBuf", "cyclus::Material"]
+ },
+ "fill_commod_prefs": {
+ "alias": ["fill_commod_prefs", "val"],
+ "default": [],
+ "doc": "Filler stream commodity request preferences for each of the given filler commodities (same order). If unspecified, default is to use 1.0 for all preferences.",
+ "index": 1,
+ "shape": [-1, -1],
+ "tooltip": ["fill_commod_prefs", ""],
+ "type": ["std::vector", "double"],
+ "uilabel": ["Filler Stream Preferences", ""]
+ },
+ "fill_commods": {
+ "alias": ["fill_commods", "val"],
+ "doc": "Ordered list of commodities on which to requesting filler stream material.",
+ "index": 0,
+ "shape": [-1, -1],
+ "tooltip": ["fill_commods", ""],
+ "type": ["std::vector", "std::string"],
+ "uilabel": ["Filler Stream Commodities", ""],
+ "uitype": ["oneormore", "incommodity"]
+ },
+ "fill_recipe": {
+ "alias": "fill_recipe",
+ "doc": "Name of recipe to be used in filler material stream requests.",
+ "index": 2,
+ "shape": [-1],
+ "tooltip": "fill_recipe",
+ "type": "std::string",
+ "uilabel": "Filler Stream Recipe",
+ "uitype": "inrecipe"
+ },
+ "fill_size": {
+ "alias": "fill_size",
+ "doc": "Size of filler material stream inventory.",
+ "index": 3,
+ "shape": [-1],
+ "tooltip": "fill_size",
+ "type": "double",
+ "uilabel": "Filler Stream Inventory Capacity",
+ "units": "kg"
+ },
+ "fiss": {
+ "capacity": "fiss_size",
+ "index": 9,
+ "shape": [-1, -1],
+ "type": ["cyclus::toolkit::ResBuf", "cyclus::Material"]
+ },
+ "fiss_commod_prefs": {
+ "alias": ["fiss_commod_prefs", "val"],
+ "default": [],
+ "doc": "Fissile stream commodity request preferences for each of the given fissile commodities (same order). If unspecified, default is to use 1.0 for all preferences.",
+ "index": 6,
+ "shape": [-1, -1],
+ "tooltip": ["fiss_commod_prefs", ""],
+ "type": ["std::vector", "double"],
+ "uilabel": ["Fissile Stream Preferences", ""]
+ },
+ "fiss_commods": {
+ "alias": ["fiss_commods", "val"],
+ "doc": "Ordered list of commodities on which to requesting fissile stream material.",
+ "index": 5,
+ "shape": [-1, -1],
+ "tooltip": ["fiss_commods", ""],
+ "type": ["std::vector", "std::string"],
+ "uilabel": ["Fissile Stream Commodities", ""],
+ "uitype": ["oneormore", "incommodity"]
+ },
+ "fiss_recipe": {
+ "alias": "fiss_recipe",
+ "default": "",
+ "doc": "Name for recipe to be used in fissile stream requests. Empty string results in use of an empty dummy recipe.",
+ "index": 7,
+ "shape": [-1],
+ "tooltip": "fiss_recipe",
+ "type": "std::string",
+ "uilabel": "Fissile Stream Recipe",
+ "uitype": "inrecipe"
+ },
+ "fiss_size": {
+ "alias": "fiss_size",
+ "doc": "Size of fissile material stream inventory.",
+ "index": 8,
+ "shape": [-1],
+ "tooltip": "fiss_size",
+ "type": "double",
+ "uilabel": "Fissile Stream Inventory Capacity",
+ "units": "kg"
+ },
+ "latitude": {
+ "alias": "latitude",
+ "default": 0.0,
+ "doc": "Latitude of the agent's geographical position. The value should be expressed in degrees as a double.",
+ "index": 18,
+ "shape": [-1],
+ "tooltip": "latitude",
+ "type": "double",
+ "uilabel": "Geographical latitude in degrees as a double"
+ },
+ "longitude": {
+ "alias": "longitude",
+ "default": 0.0,
+ "doc": "Longitude of the agent's geographical position. The value should be expressed in degrees as a double.",
+ "index": 19,
+ "shape": [-1],
+ "tooltip": "longitude",
+ "type": "double",
+ "uilabel": "Geographical longitude in degrees as a double"
+ },
+ "outcommod": {
+ "alias": "outcommod",
+ "doc": "Commodity on which to offer/supply mixed fuel material.",
+ "index": 15,
+ "shape": [-1],
+ "tooltip": "outcommod",
+ "type": "std::string",
+ "uilabel": "Output Commodity",
+ "uitype": "outcommodity"
+ },
+ "spectrum": {
+ "alias": "spectrum",
+ "categorical": ["fission_spectrum_ave", "thermal"],
+ "doc": "The type of cross-sections to use for composition property calculation. Use 'fission_spectrum_ave' for fast reactor compositions or 'thermal' for thermal reactors.",
+ "index": 17,
+ "shape": [-1],
+ "tooltip": "spectrum",
+ "type": "std::string",
+ "uilabel": "Spectrum type",
+ "uitype": "combobox"
+ },
+ "throughput": {
+ "alias": "throughput",
+ "default": 1.000000000000000e+299,
+ "doc": "Maximum number of kg of fuel material that can be supplied per time step.",
+ "index": 16,
+ "range": [0.0, 1.000000000000000e+299],
+ "shape": [-1],
+ "tooltip": "throughput",
+ "type": "double",
+ "uilabel": "Maximum Throughput",
+ "uitype": "range",
+ "units": "kg"
+ },
+ "topup": {
+ "capacity": "topup_size",
+ "index": 14,
+ "shape": [-1, -1],
+ "type": ["cyclus::toolkit::ResBuf", "cyclus::Material"]
+ },
+ "topup_commod": {
+ "alias": "topup_commod",
+ "default": "",
+ "doc": "Commodity on which to request material for top-up stream. This MUST be set if 'topup_size > 0'.",
+ "index": 10,
+ "shape": [-1],
+ "tooltip": "topup_commod",
+ "type": "std::string",
+ "uilabel": "Top-up Stream Commodity",
+ "uitype": "incommodity"
+ },
+ "topup_pref": {
+ "alias": "topup_pref",
+ "default": 1.0,
+ "doc": "Top-up material stream request preference.",
+ "index": 11,
+ "shape": [-1],
+ "tooltip": "topup_pref",
+ "type": "double",
+ "uilabel": "Top-up Stream Preference"
+ },
+ "topup_recipe": {
+ "alias": "topup_recipe",
+ "default": "",
+ "doc": "Name of recipe to be used in top-up material stream requests. This MUST be set if 'topup_size > 0'.",
+ "index": 12,
+ "shape": [-1],
+ "tooltip": "topup_recipe",
+ "type": "std::string",
+ "uilabel": "Top-up Stream Recipe",
+ "uitype": "inrecipe"
+ },
+ "topup_size": {
+ "alias": "topup_size",
+ "default": 0,
+ "doc": "Size of top-up material stream inventory.",
+ "index": 13,
+ "shape": [-1],
+ "tooltip": "topup_size",
+ "type": "double",
+ "uilabel": "Top-up Stream Inventory Capacity",
+ "units": "kg"
+ }
+ }
+ },
+ ":cycamore:GrowthRegion": {
+ "all_parents": [
+ "cyclus::Agent",
+ "cyclus::Ider",
+ "cyclus::Region",
+ "cyclus::StateWrangler",
+ "cyclus::TimeListener",
+ "cyclus::toolkit::Position"
+ ],
+ "doc": "A region that governs a scenario in which there is growth in demand for a commodity. ",
+ "entity": "region",
+ "name": "cycamore::GrowthRegion",
+ "parents": ["cyclus::Region", "cyclus::toolkit::Position"],
+ "vars": {
+ "commodity_demand": {
+ "alias": [
+ ["growth", "item"],
+ "commod",
+ ["piecewise_function", ["piece", "start", ["function", "type", "params"]]]
+ ],
+ "doc": "Nameplate capacity demand functions.\n\nEach demand type must be for a commodity for which capacity can be built (e.g., 'power' from cycamore::Reactors). Any archetype that implements the cyclus::toolkit::CommodityProducer interface can interact with the GrowthRegion in the manner.\n\nDemand functions are defined as piecewise functions. Each piece must be provided a starting time and function description. Each function description is comprised of a function type and associated parameters. \n\n * Start times are inclusive. For a start time :math:`t_0`, the demand function is evaluated on :math:`[t_0, \\infty)`.\n\n * Supported function types are based on the `cyclus::toolkit::BasicFunctionFactory types `_. \n\n * The type name is the lower-case name of the function (e.g., 'linear', 'exponential', etc.).\n\n * The parameters associated with each function type can be found on their respective documentation pages.",
+ "index": 0,
+ "shape": [-1, -1, -1, -1, -1, -1, -1, -1],
+ "tooltip": [["commodity_demand", ""], "", ["", ["", "", ["", "", ""]]]],
+ "type": [
+ "std::map",
+ "std::string",
+ [
+ "std::vector",
+ ["std::pair", "int", ["std::pair", "std::string", "std::string"]]
+ ]
+ ],
+ "uilabel": [["Growth Demand Curves", ""], "", ["", ["", "", ["", "", ""]]]],
+ "uitype": [
+ "oneormore",
+ "string",
+ ["oneormore", ["pair", "int", ["pair", "string", "string"]]]
+ ]
+ },
+ "growth": "commodity_demand",
+ "latitude": {
+ "alias": "latitude",
+ "default": 0.0,
+ "doc": "Latitude of the agent's geographical position. The value should be expressed in degrees as a double.",
+ "index": 2,
+ "shape": [-1],
+ "tooltip": "latitude",
+ "type": "double",
+ "uilabel": "Geographical latitude in degrees as a double"
+ },
+ "longitude": {
+ "alias": "longitude",
+ "default": 0.0,
+ "doc": "Longitude of the agent's geographical position. The value should be expressed in degrees as a double.",
+ "index": 3,
+ "shape": [-1],
+ "tooltip": "longitude",
+ "type": "double",
+ "uilabel": "Geographical longitude in degrees as a double"
+ }
+ }
+ },
+ ":cycamore:ManagerInst": {
+ "all_parents": [
+ "cyclus::Agent",
+ "cyclus::Ider",
+ "cyclus::Institution",
+ "cyclus::StateWrangler",
+ "cyclus::TimeListener",
+ "cyclus::toolkit::AgentManaged",
+ "cyclus::toolkit::Builder",
+ "cyclus::toolkit::CommodityProducerManager",
+ "cyclus::toolkit::Position"
+ ],
+ "doc": "An institution that owns and operates a manually entered list of facilities in the input file",
+ "entity": "institution",
+ "name": "cycamore::ManagerInst",
+ "parents": [
+ "cyclus::Institution",
+ "cyclus::toolkit::Builder",
+ "cyclus::toolkit::CommodityProducerManager",
+ "cyclus::toolkit::Position"
+ ],
+ "vars": {
+ "latitude": {
+ "alias": "latitude",
+ "default": 0.0,
+ "doc": "Latitude of the agent's geographical position. The value should be expressed in degrees as a double.",
+ "index": 1,
+ "shape": [-1],
+ "tooltip": "latitude",
+ "type": "double",
+ "uilabel": "Geographical latitude in degrees as a double"
+ },
+ "longitude": {
+ "alias": "longitude",
+ "default": 0.0,
+ "doc": "Longitude of the agent's geographical position. The value should be expressed in degrees as a double.",
+ "index": 2,
+ "shape": [-1],
+ "tooltip": "longitude",
+ "type": "double",
+ "uilabel": "Geographical longitude in degrees as a double"
+ },
+ "prototypes": {
+ "alias": ["prototypes", "val"],
+ "doc": "A set of facility prototypes that this institution can build. All prototypes in this list must be based on an archetype that implements the cyclus::toolkit::CommodityProducer interface",
+ "index": 0,
+ "shape": [-1, -1],
+ "tooltip": ["producer facility prototypes", ""],
+ "type": ["std::vector", "std::string"],
+ "uilabel": ["Producer Prototype List", ""],
+ "uitype": ["oneormore", "prototype"]
+ }
+ }
+ },
+ ":cycamore:Mixer": {
+ "all_parents": [
+ "cyclus::Agent",
+ "cyclus::Facility",
+ "cyclus::Ider",
+ "cyclus::StateWrangler",
+ "cyclus::TimeListener",
+ "cyclus::Trader",
+ "cyclus::toolkit::Position"
+ ],
+ "doc": "Mixer mixes N streams with fixed, static, user-specified ratios into a single output stream. The Mixer has N input inventories: one for each streams to be mixed, and one output stream. The supplying of mixed material is constrained by available inventory of mixed material quantities.",
+ "entity": "facility",
+ "name": "cycamore::Mixer",
+ "niche": "mixing facility",
+ "parents": ["cyclus::Facility", "cyclus::toolkit::Position"],
+ "vars": {
+ "in_streams": "streams_",
+ "latitude": {
+ "alias": "latitude",
+ "default": 0.0,
+ "doc": "Latitude of the agent's geographical position. The value should be expressed in degrees as a double.",
+ "index": 6,
+ "shape": [-1],
+ "tooltip": "latitude",
+ "type": "double",
+ "uilabel": "Geographical latitude in degrees as a double"
+ },
+ "longitude": {
+ "alias": "longitude",
+ "default": 0.0,
+ "doc": "Longitude of the agent's geographical position. The value should be expressed in degrees as a double.",
+ "index": 7,
+ "shape": [-1],
+ "tooltip": "longitude",
+ "type": "double",
+ "uilabel": "Geographical longitude in degrees as a double"
+ },
+ "out_buf_size": {
+ "alias": "out_buf_size",
+ "default": 1.000000000000000e+299,
+ "doc": "Maximum amount of mixed material that can be stored. If full, the facility halts operation until space becomes available.",
+ "index": 3,
+ "range": [0.0, 1.000000000000000e+299],
+ "shape": [-1],
+ "tooltip": "out_buf_size",
+ "type": "double",
+ "uilabel": "Maximum Leftover Inventory",
+ "uitype": "range",
+ "units": "kg"
+ },
+ "out_commod": {
+ "alias": "out_commod",
+ "doc": "Commodity on which to offer/supply mixed fuel material.",
+ "index": 2,
+ "shape": [-1],
+ "tooltip": "out_commod",
+ "type": "std::string",
+ "uilabel": "Output Commodity",
+ "uitype": "outcommodity"
+ },
+ "output": {
+ "capacity": "out_buf_size",
+ "index": 4,
+ "shape": [-1, -1],
+ "type": ["cyclus::toolkit::ResBuf", "cyclus::Material"]
+ },
+ "streams_": {
+ "alias": [
+ "in_streams",
+ [
+ "stream",
+ ["info", "mixing_ratio", "buf_size"],
+ [["commodities", "item"], "commodity", "pref"]
+ ]
+ ],
+ "doc": "",
+ "index": 0,
+ "shape": [-1, -1, -1, -1, -1, -1, -1, -1],
+ "tooltip": ["streams_", ["", ["", "", ""], [["", ""], "", ""]]],
+ "type": [
+ "std::vector",
+ [
+ "std::pair",
+ ["std::pair", "double", "double"],
+ ["std::map", "std::string", "double"]
+ ]
+ ],
+ "uilabel": ["streams_", ["", ["", "", ""], [["", ""], "", ""]]],
+ "uitype": [
+ "oneormore",
+ ["pair", ["pair", "double", "double"], ["oneormore", "incommodity", "double"]]
+ ]
+ },
+ "throughput": {
+ "alias": "throughput",
+ "default": 1.000000000000000e+299,
+ "doc": "Maximum number of kg of fuel material that can be mixed per time step.",
+ "index": 5,
+ "range": [0.0, 1.000000000000000e+299],
+ "shape": [-1],
+ "tooltip": "throughput",
+ "type": "double",
+ "uilabel": "Maximum Throughput",
+ "uitype": "range",
+ "units": "kg"
+ }
+ }
+ },
+ ":cycamore:Reactor": {
+ "all_parents": [
+ "cyclus::Agent",
+ "cyclus::Facility",
+ "cyclus::Ider",
+ "cyclus::StateWrangler",
+ "cyclus::TimeListener",
+ "cyclus::Trader",
+ "cyclus::toolkit::AgentManaged",
+ "cyclus::toolkit::CommodityProducer",
+ "cyclus::toolkit::Position"
+ ],
+ "doc": "Reactor is a simple, general reactor based on static compositional transformations to model fuel burnup. The user specifies a set of input fuels and corresponding burnt compositions that fuel is transformed to when it is discharged from the core. No incremental transmutation takes place. Rather, at the end of an operational cycle, the batch being discharged from the core is instantaneously transmuted from its original fresh fuel composition into its spent fuel form.\n\nEach fuel is identified by a specific input commodity and has an associated input recipe (nuclide composition), output recipe, output commidity, and preference. The preference identifies which input fuels are preferred when requesting. Changes in these preferences can be specified as a function of time using the pref_change variables. Changes in the input-output recipe compositions can also be specified as a function of time using the recipe_change variables.\n\nThe reactor treats fuel as individual assemblies that are never split, combined or otherwise treated in any non-discrete way. Fuel is requested in full-or-nothing assembly sized quanta. If real-world assembly modeling is unnecessary, parameters can be adjusted (e.g. n_assem_core, assem_size, n_assem_batch). At the end of every cycle, a full batch is discharged from the core consisting of n_assem_batch assemblies of assem_size kg. The reactor also has a specifiable refueling time period following the end of each cycle at the end of which it will resume operation on the next cycle *if* it has enough fuel for a full core; otherwise it waits until it has enough fresh fuel assemblies.\n\nIn addition to its core, the reactor has an on-hand fresh fuel inventory and a spent fuel inventory whose capacities are specified by n_assem_fresh and n_assem_spent respectively. Each time step the reactor will attempt to acquire enough fresh fuel to fill its fresh fuel inventory (and its core if the core isn't currently full). 
If the fresh fuel inventory has zero capacity, fuel will be ordered just-in-time after the end of each operational cycle before the next begins. If the spent fuel inventory becomes full, the reactor will halt operation at the end of the next cycle until there is more room. Each time step, the reactor will try to trade away as much of its spent fuel inventory as possible.\n\nWhen the reactor reaches the end of its lifetime, it will discharge all material from its core and trade away all its spent fuel as quickly as possible. Full decommissioning will be delayed until all spent fuel is gone. If the reactor has a full core when it is decommissioned (i.e. is mid-cycle) when the reactor is decommissioned, half (rounded up to nearest int) of its assemblies are transmuted to their respective burnt compositions.",
+ "entity": "facility",
+ "name": "cycamore::Reactor",
+ "niche": "reactor",
+ "parents": [
+ "cyclus::Facility",
+ "cyclus::toolkit::CommodityProducer",
+ "cyclus::toolkit::Position"
+ ],
+ "vars": {
+ "assem_size": {
+ "alias": "assem_size",
+ "doc": "Mass (kg) of a single assembly.",
+ "index": 9,
+ "range": [1.0, 100000.0],
+ "shape": [-1],
+ "tooltip": "assem_size",
+ "type": "double",
+ "uilabel": "Assembly Mass",
+ "uitype": "range",
+ "units": "kg"
+ },
+ "core": {
+ "capacity": "n_assem_core * assem_size",
+ "index": 27,
+ "shape": [-1, -1],
+ "type": ["cyclus::toolkit::ResBuf", "cyclus::Material"]
+ },
+ "cycle_step": {
+ "alias": "cycle_step",
+ "default": 0,
+ "doc": "Number of time steps since the start of the last cycle. Only set this if you know what you are doing",
+ "index": 16,
+ "shape": [-1],
+ "tooltip": "cycle_step",
+ "type": "int",
+ "uilabel": "Time Since Start of Last Cycle",
+ "units": "time steps"
+ },
+ "cycle_time": {
+ "alias": "cycle_time",
+ "default": 18,
+ "doc": "The duration of a full operational cycle (excluding refueling time) in time steps.",
+ "index": 14,
+ "shape": [-1],
+ "tooltip": "cycle_time",
+ "type": "int",
+ "uilabel": "Cycle Length",
+ "units": "time steps"
+ },
+ "decom_transmute_all": {
+ "alias": "decom_transmute_all",
+ "default": 0,
+ "doc": "If true, the archetype transmutes all assemblies upon decommissioning If false, the archetype only transmutes half.",
+ "index": 22,
+ "shape": [-1],
+ "tooltip": "decom_transmute_all",
+ "type": "bool",
+ "uilabel": "Boolean for transmutation behavior upon decommissioning."
+ },
+ "discharged": {
+ "alias": "discharged",
+ "default": 0,
+ "doc": "This should NEVER be set manually",
+ "index": 29,
+ "internal": true,
+ "shape": [-1],
+ "tooltip": "discharged",
+ "type": "bool",
+ "uilabel": "discharged"
+ },
+ "fresh": {
+ "capacity": "n_assem_fresh * assem_size",
+ "index": 26,
+ "shape": [-1, -1],
+ "type": ["cyclus::toolkit::ResBuf", "cyclus::Material"]
+ },
+ "fuel_incommods": {
+ "alias": ["fuel_incommods", "val"],
+ "doc": "Ordered list of input commodities on which to requesting fuel.",
+ "index": 0,
+ "shape": [-1, -1],
+ "tooltip": ["fuel_incommods", ""],
+ "type": ["std::vector", "std::string"],
+ "uilabel": ["Fresh Fuel Commodity List", ""],
+ "uitype": ["oneormore", "incommodity"]
+ },
+ "fuel_inrecipes": {
+ "alias": ["fuel_inrecipes", "val"],
+ "doc": "Fresh fuel recipes to request for each of the given fuel input commodities (same order).",
+ "index": 1,
+ "shape": [-1, -1],
+ "tooltip": ["fuel_inrecipes", ""],
+ "type": ["std::vector", "std::string"],
+ "uilabel": ["Fresh Fuel Recipe List", ""],
+ "uitype": ["oneormore", "inrecipe"]
+ },
+ "fuel_outcommods": {
+ "alias": ["fuel_outcommods", "val"],
+ "doc": "Output commodities on which to offer spent fuel originally received as each particular input commodity (same order).",
+ "index": 3,
+ "shape": [-1, -1],
+ "tooltip": ["fuel_outcommods", ""],
+ "type": ["std::vector", "std::string"],
+ "uilabel": ["Spent Fuel Commodity List", ""],
+ "uitype": ["oneormore", "outcommodity"]
+ },
+ "fuel_outrecipes": {
+ "alias": ["fuel_outrecipes", "val"],
+ "doc": "Spent fuel recipes corresponding to the given fuel input commodities (same order). Fuel received via a particular input commodity is transmuted to the recipe specified here after being burned during a cycle.",
+ "index": 4,
+ "shape": [-1, -1],
+ "tooltip": ["fuel_outrecipes", ""],
+ "type": ["std::vector", "std::string"],
+ "uilabel": ["Spent Fuel Recipe List", ""],
+ "uitype": ["oneormore", "outrecipe"]
+ },
+ "fuel_prefs": {
+ "alias": ["fuel_prefs", "val"],
+ "default": [],
+ "doc": "The preference for each type of fresh fuel requested corresponding to each input commodity (same order). If no preferences are specified, 1.0 is used for all fuel requests (default).",
+ "index": 2,
+ "shape": [-1, -1],
+ "tooltip": ["fuel_prefs", ""],
+ "type": ["std::vector", "double"],
+ "uilabel": ["Fresh Fuel Preference List", ""]
+ },
+ "hybrid_": {
+ "alias": "hybrid_",
+ "default": 1,
+ "doc": "True if reactor is a hybrid system (produces side products)",
+ "index": 21,
+ "internal": true,
+ "shape": [-1],
+ "tooltip": "hybrid_",
+ "type": "bool",
+ "uilabel": "hybrid_"
+ },
+ "latitude": {
+ "alias": "latitude",
+ "default": 0.0,
+ "doc": "Latitude of the agent's geographical position. The value should be expressed in degrees as a double.",
+ "index": 31,
+ "shape": [-1],
+ "tooltip": "latitude",
+ "type": "double",
+ "uilabel": "Geographical latitude in degrees as a double"
+ },
+ "longitude": {
+ "alias": "longitude",
+ "default": 0.0,
+ "doc": "Longitude of the agent's geographical position. The value should be expressed in degrees as a double.",
+ "index": 32,
+ "shape": [-1],
+ "tooltip": "longitude",
+ "type": "double",
+ "uilabel": "Geographical longitude in degrees as a double"
+ },
+ "n_assem_batch": {
+ "alias": "n_assem_batch",
+ "doc": "Number of assemblies that constitute a single batch. This is the number of assemblies discharged from the core fully burned each cycle.Batch size is equivalent to ``n_assem_batch / n_assem_core``.",
+ "index": 10,
+ "shape": [-1],
+ "tooltip": "n_assem_batch",
+ "type": "int",
+ "uilabel": "Number of Assemblies per Batch"
+ },
+ "n_assem_core": {
+ "alias": "n_assem_core",
+ "default": 3,
+ "doc": "Number of assemblies that constitute a full core.",
+ "index": 11,
+ "range": [1, 3],
+ "shape": [-1],
+ "tooltip": "n_assem_core",
+ "type": "int",
+ "uilabel": "Number of Assemblies in Core",
+ "uitype": "range"
+ },
+ "n_assem_fresh": {
+ "alias": "n_assem_fresh",
+ "default": 0,
+ "doc": "Number of fresh fuel assemblies to keep on-hand if possible.",
+ "index": 12,
+ "range": [0, 3],
+ "shape": [-1],
+ "tooltip": "n_assem_fresh",
+ "type": "int",
+ "uilabel": "Minimum Fresh Fuel Inventory",
+ "uitype": "range",
+ "units": "assemblies"
+ },
+ "n_assem_spent": {
+ "alias": "n_assem_spent",
+ "default": 1000000000,
+ "doc": "Number of spent fuel assemblies that can be stored on-site before reactor operation stalls.",
+ "index": 13,
+ "range": [0, 1000000000],
+ "shape": [-1],
+ "tooltip": "n_assem_spent",
+ "type": "int",
+ "uilabel": "Maximum Spent Fuel Inventory",
+ "uitype": "range",
+ "units": "assemblies"
+ },
+ "power_cap": {
+ "alias": "power_cap",
+ "default": 0,
+ "doc": "Amount of electrical power the facility produces when operating normally.",
+ "index": 17,
+ "range": [0.0, 2000.0],
+ "shape": [-1],
+ "tooltip": "power_cap",
+ "type": "double",
+ "uilabel": "Nominal Reactor Power",
+ "uitype": "range",
+ "units": "MWe"
+ },
+ "power_name": {
+ "alias": "power_name",
+ "default": "power",
+ "doc": "The name of the 'power' commodity used in conjunction with a deployment curve.",
+ "index": 18,
+ "shape": [-1],
+ "tooltip": "power_name",
+ "type": "std::string",
+ "uilabel": "Power Commodity Name"
+ },
+ "pref_change_commods": {
+ "alias": ["pref_change_commods", "val"],
+ "default": [],
+ "doc": "The input commodity for a particular fuel preference change. Same order as and direct correspondence to the specified preference change times.",
+ "index": 24,
+ "shape": [-1, -1],
+ "tooltip": ["pref_change_commods", ""],
+ "type": ["std::vector", "std::string"],
+ "uilabel": ["Commodity for Changed Fresh Fuel Preference", ""],
+ "uitype": ["oneormore", "incommodity"]
+ },
+ "pref_change_times": {
+ "alias": ["pref_change_times", "val"],
+ "default": [],
+ "doc": "A time step on which to change the request preference for a particular fresh fuel type.",
+ "index": 23,
+ "shape": [-1, -1],
+ "tooltip": ["pref_change_times", ""],
+ "type": ["std::vector", "int"],
+ "uilabel": ["Time to Change Fresh Fuel Preference", ""]
+ },
+ "pref_change_values": {
+ "alias": ["pref_change_values", "val"],
+ "default": [],
+ "doc": "The new/changed request preference for a particular fresh fuel. Same order as and direct correspondence to the specified preference change times.",
+ "index": 25,
+ "shape": [-1, -1],
+ "tooltip": ["pref_change_values", ""],
+ "type": ["std::vector", "double"],
+ "uilabel": ["Changed Fresh Fuel Preference", ""]
+ },
+ "recipe_change_commods": {
+ "alias": ["recipe_change_commods", "val"],
+ "default": [],
+ "doc": "The input commodity indicating fresh fuel for which recipes will be changed. Same order as and direct correspondence to the specified recipe change times.",
+ "index": 6,
+ "shape": [-1, -1],
+ "tooltip": ["recipe_change_commods", ""],
+ "type": ["std::vector", "std::string"],
+ "uilabel": ["Commodity for Changed Fresh/Spent Fuel Recipe", ""],
+ "uitype": ["oneormore", "incommodity"]
+ },
+ "recipe_change_in": {
+ "alias": ["recipe_change_in", "val"],
+ "default": [],
+ "doc": "The new input recipe to use for this recipe change. Same order as and direct correspondence to the specified recipe change times.",
+ "index": 7,
+ "shape": [-1, -1],
+ "tooltip": ["recipe_change_in", ""],
+ "type": ["std::vector", "std::string"],
+ "uilabel": ["New Recipe for Fresh Fuel", ""],
+ "uitype": ["oneormore", "inrecipe"]
+ },
+ "recipe_change_out": {
+ "alias": ["recipe_change_out", "val"],
+ "default": [],
+ "doc": "The new output recipe to use for this recipe change. Same order as and direct correspondence to the specified recipe change times.",
+ "index": 8,
+ "shape": [-1, -1],
+ "tooltip": ["recipe_change_out", ""],
+ "type": ["std::vector", "std::string"],
+ "uilabel": ["New Recipe for Spent Fuel", ""],
+ "uitype": ["oneormore", "outrecipe"]
+ },
+ "recipe_change_times": {
+ "alias": ["recipe_change_times", "val"],
+ "default": [],
+ "doc": "A time step on which to change the input-output recipe pair for a requested fresh fuel.",
+ "index": 5,
+ "shape": [-1, -1],
+ "tooltip": ["recipe_change_times", ""],
+ "type": ["std::vector", "int"],
+ "uilabel": ["Time to Change Fresh/Spent Fuel Recipe", ""]
+ },
+ "refuel_time": {
+ "alias": "refuel_time",
+ "default": 1,
+ "doc": "The duration of a full refueling period - the minimum time between the end of a cycle and the start of the next cycle.",
+ "index": 15,
+ "shape": [-1],
+ "tooltip": "refuel_time",
+ "type": "int",
+ "uilabel": "Refueling Outage Duration",
+ "units": "time steps"
+ },
+ "res_indexes": {
+ "alias": [["res_indexes", "item"], "key", "val"],
+ "default": {},
+ "doc": "This should NEVER be set manually",
+ "index": 30,
+ "internal": true,
+ "shape": [-1, -1, -1],
+ "tooltip": [["res_indexes", ""], "", ""],
+ "type": ["std::map", "int", "int"],
+ "uilabel": [["res_indexes", ""], "", ""]
+ },
+ "side_product_quantity": {
+ "alias": ["side_product_quantity", "val"],
+ "default": [],
+ "doc": "Ordered vector of the quantity of side product the reactor produces with power",
+ "index": 20,
+ "shape": [-1, -1],
+ "tooltip": ["side_product_quantity", ""],
+ "type": ["std::vector", "double"],
+ "uilabel": ["Quantity of Side Product from Reactor Plant", ""]
+ },
+ "side_products": {
+ "alias": ["side_products", "val"],
+ "default": [],
+ "doc": "Ordered vector of side product the reactor produces with power",
+ "index": 19,
+ "shape": [-1, -1],
+ "tooltip": ["side_products", ""],
+ "type": ["std::vector", "std::string"],
+ "uilabel": ["Side Product from Reactor Plant", ""]
+ },
+ "spent": {
+ "capacity": "n_assem_spent * assem_size",
+ "index": 28,
+ "shape": [-1, -1],
+ "type": ["cyclus::toolkit::ResBuf", "cyclus::Material"]
+ }
+ }
+ },
+ ":cycamore:Separations": {
+ "all_parents": [
+ "cyclus::Agent",
+ "cyclus::Facility",
+ "cyclus::Ider",
+ "cyclus::StateWrangler",
+ "cyclus::TimeListener",
+ "cyclus::Trader",
+ "cyclus::toolkit::Position"
+ ],
+ "doc": "Separations processes feed material into one or more streams containing specific elements and/or nuclides. It uses mass-based efficiencies.\n\nUser defined separations streams are specified as groups of component-efficiency pairs where 'component' means either a particular element or a particular nuclide. Each component's paired efficiency represents the mass fraction of that component in the feed that is separated into that stream. The efficiencies of a particular component across all streams must sum up to less than or equal to one. If less than one, the remaining material is sent to a waste inventory and (potentially) traded away from there.\n\nThe facility receives material into a feed inventory that it processes with a specified throughput each time step. Each output stream has a corresponding output inventory size/limit. If the facility is unable to reduce its stocks by trading and hits this limit for any of its output streams, further processing/separations of feed material will halt until room is again available in the output streams.",
+ "entity": "facility",
+ "name": "cycamore::Separations",
+ "niche": "separations",
+ "parents": ["cyclus::Facility", "cyclus::toolkit::Position"],
+ "vars": {
+ "feed": {
+ "capacity": "feedbuf_size",
+ "index": 4,
+ "shape": [-1, -1],
+ "type": ["cyclus::toolkit::ResBuf", "cyclus::Material"]
+ },
+ "feed_commod_prefs": {
+ "alias": ["feed_commod_prefs", "val"],
+ "default": [],
+ "doc": "Feed commodity request preferences for each of the given feed commodities (same order). If unspecified, default is to use 1.0 for all preferences.",
+ "index": 1,
+ "shape": [-1, -1],
+ "tooltip": ["feed_commod_prefs", ""],
+ "type": ["std::vector", "double"],
+ "uilabel": ["Feed Commodity Preference List", ""]
+ },
+ "feed_commods": {
+ "alias": ["feed_commods", "val"],
+ "doc": "Ordered list of commodities on which to request feed material to separate. Order only matters for matching up with feed commodity preferences if specified.",
+ "index": 0,
+ "shape": [-1, -1],
+ "tooltip": ["feed_commods", ""],
+ "type": ["std::vector", "std::string"],
+ "uilabel": ["Feed Commodity List", ""],
+ "uitype": ["oneormore", "incommodity"]
+ },
+ "feed_recipe": {
+ "alias": "feed_recipe",
+ "default": "",
+ "doc": "Name for recipe to be used in feed requests. Empty string results in use of a dummy recipe.",
+ "index": 2,
+ "shape": [-1],
+ "tooltip": "feed_recipe",
+ "type": "std::string",
+ "uilabel": "Feed Commodity Recipe List",
+ "uitype": "inrecipe"
+ },
+ "feedbuf_size": {
+ "alias": "feedbuf_size",
+ "doc": "Maximum amount of feed material to keep on hand.",
+ "index": 3,
+ "shape": [-1],
+ "tooltip": "feedbuf_size",
+ "type": "double",
+ "uilabel": "Maximum Feed Inventory",
+ "units": "kg"
+ },
+ "latitude": {
+ "alias": "latitude",
+ "default": 0.0,
+ "doc": "Latitude of the agent's geographical position. The value should be expressed in degrees as a double.",
+ "index": 11,
+ "shape": [-1],
+ "tooltip": "latitude",
+ "type": "double",
+ "uilabel": "Geographical latitude in degrees as a double"
+ },
+ "leftover": {
+ "capacity": "leftoverbuf_size",
+ "index": 8,
+ "shape": [-1, -1],
+ "type": ["cyclus::toolkit::ResBuf", "cyclus::Material"]
+ },
+ "leftover_commod": {
+ "alias": "leftover_commod",
+ "default": "default-waste-stream",
+ "doc": "Commodity on which to trade the leftover separated material stream. This MUST NOT be the same as any commodity used to define the other separations streams.",
+ "index": 6,
+ "shape": [-1],
+ "tooltip": "leftover_commod",
+ "type": "std::string",
+ "uilabel": "Leftover Commodity",
+ "uitype": "outcommodity"
+ },
+ "leftoverbuf_size": {
+ "alias": "leftoverbuf_size",
+ "default": 1.000000000000000e+299,
+ "doc": "Maximum amount of leftover separated material (not included in any other stream) that can be stored. If full, the facility halts operation until space becomes available.",
+ "index": 7,
+ "range": [0.0, 1.000000000000000e+299],
+ "shape": [-1],
+ "tooltip": "leftoverbuf_size",
+ "type": "double",
+ "uilabel": "Maximum Leftover Inventory",
+ "uitype": "range",
+ "units": "kg"
+ },
+ "longitude": {
+ "alias": "longitude",
+ "default": 0.0,
+ "doc": "Longitude of the agent's geographical position. The value should be expressed in degrees as a double.",
+ "index": 12,
+ "shape": [-1],
+ "tooltip": "longitude",
+ "type": "double",
+ "uilabel": "Geographical longitude in degrees as a double"
+ },
+ "streams": "streams_",
+ "streams_": {
+ "alias": [
+ ["streams", "item"],
+ "commod",
+ ["info", "buf_size", [["efficiencies", "item"], "comp", "eff"]]
+ ],
+ "doc": "Output streams for separations. Each stream must have a unique name identifying the commodity on which its material is traded, a max buffer capacity in kg (neg values indicate infinite size), and a set of component efficiencies. 'comp' is a component to be separated into the stream (e.g. U, Pu, etc.) and 'eff' is the mass fraction of the component that is separated from the feed into this output stream. If any stream buffer is full, the facility halts operation until space becomes available. The sum total of all component efficiencies across streams must be less than or equal to 1 (e.g. sum of U efficiencies for all streams must be <= 1).",
+ "index": 9,
+ "shape": [-1, -1, -1, -1, -1, -1, -1],
+ "tooltip": [["streams_", ""], "", ["", "", [["", ""], "", ""]]],
+ "type": [
+ "std::map",
+ "std::string",
+ ["std::pair", "double", ["std::map", "int", "double"]]
+ ],
+ "uilabel": [["Separations Streams and Efficiencies", ""], "", ["", "", [["", ""], "", ""]]],
+ "uitype": [
+ "oneormore",
+ "outcommodity",
+ ["pair", "double", ["oneormore", "nuclide", "double"]]
+ ]
+ },
+ "throughput": {
+ "alias": "throughput",
+ "default": 1.000000000000000e+299,
+ "doc": "Maximum quantity of feed material that can be processed per time step.",
+ "index": 5,
+ "range": [0.0, 1.000000000000000e+299],
+ "shape": [-1],
+ "tooltip": "throughput",
+ "type": "double",
+ "uilabel": "Maximum Separations Throughput",
+ "uitype": "range",
+ "units": "kg/(time step)"
+ }
+ }
+ },
+ ":cycamore:Sink": {
+ "all_parents": [
+ "cyclus::Agent",
+ "cyclus::Facility",
+ "cyclus::Ider",
+ "cyclus::StateWrangler",
+ "cyclus::TimeListener",
+ "cyclus::Trader",
+ "cyclus::toolkit::Position"
+ ],
+ "doc": " A sink facility that accepts materials and products with a fixed\n throughput (per time step) capacity and a lifetime capacity defined by\n a total inventory size. The inventory size and throughput capacity\n both default to infinite. If a recipe is provided, it will request\n material with that recipe. Requests are made for any number of\n specified commodities.\n",
+ "entity": "facility",
+ "name": "cycamore::Sink",
+ "parents": ["cyclus::Facility", "cyclus::toolkit::Position"],
+ "vars": {
+ "capacity": {
+ "alias": "capacity",
+ "default": 1.000000000000000e+299,
+ "doc": "capacity the sink facility can accept at each time step",
+ "index": 4,
+ "range": [0.0, 1.000000000000000e+299],
+ "shape": [-1],
+ "tooltip": "sink capacity",
+ "type": "double",
+ "uilabel": "Maximum Throughput",
+ "uitype": "range"
+ },
+ "in_commod_prefs": {
+ "alias": ["in_commod_prefs", "val"],
+ "default": [],
+ "doc": "preferences for each of the given commodities, in the same order. Defaults to 1 if unspecified",
+ "index": 1,
+ "range": [null, [1.000000000000000e-299, 1.000000000000000e+299]],
+ "shape": [-1, -1],
+ "tooltip": ["in_commod_prefs", ""],
+ "type": ["std::vector", "double"],
+ "uilabel": ["In Commodity Preferences", ""],
+ "uitype": ["oneormore", "range"]
+ },
+ "in_commods": {
+ "alias": ["in_commods", "val"],
+ "doc": "commodities that the sink facility accepts",
+ "index": 0,
+ "shape": [-1, -1],
+ "tooltip": ["input commodities", ""],
+ "type": ["std::vector", "std::string"],
+ "uilabel": ["List of Input Commodities", ""],
+ "uitype": ["oneormore", "incommodity"]
+ },
+ "inventory": {
+ "capacity": "max_inv_size",
+ "index": 5,
+ "shape": [-1, -1],
+ "type": ["cyclus::toolkit::ResBuf", "cyclus::Resource"]
+ },
+ "latitude": {
+ "alias": "latitude",
+ "default": 0.0,
+ "doc": "Latitude of the agent's geographical position. The value should be expressed in degrees as a double.",
+ "index": 6,
+ "shape": [-1],
+ "tooltip": "latitude",
+ "type": "double",
+ "uilabel": "Geographical latitude in degrees as a double"
+ },
+ "longitude": {
+ "alias": "longitude",
+ "default": 0.0,
+ "doc": "Longitude of the agent's geographical position. The value should be expressed in degrees as a double.",
+ "index": 7,
+ "shape": [-1],
+ "tooltip": "longitude",
+ "type": "double",
+ "uilabel": "Geographical longitude in degrees as a double"
+ },
+ "max_inv_size": {
+ "alias": "max_inv_size",
+ "default": 1.000000000000000e+299,
+ "doc": "total maximum inventory size of sink facility",
+ "index": 3,
+ "range": [0.0, 1.000000000000000e+299],
+ "shape": [-1],
+ "tooltip": "sink maximum inventory size",
+ "type": "double",
+ "uilabel": "Maximum Inventory",
+ "uitype": "range"
+ },
+ "recipe_name": {
+ "alias": "recipe_name",
+ "default": "",
+ "doc": "name of recipe to use for material requests, where the default (empty string) is to accept everything",
+ "index": 2,
+ "shape": [-1],
+ "tooltip": "requested composition",
+ "type": "std::string",
+ "uilabel": "Input Recipe",
+ "uitype": "inrecipe"
+ }
+ }
+ },
+ ":cycamore:Source": {
+ "all_parents": [
+ "cyclus::Agent",
+ "cyclus::Facility",
+ "cyclus::Ider",
+ "cyclus::StateWrangler",
+ "cyclus::TimeListener",
+ "cyclus::Trader",
+ "cyclus::toolkit::AgentManaged",
+ "cyclus::toolkit::CommodityProducer",
+ "cyclus::toolkit::Position"
+ ],
+ "doc": "This facility acts as a source of material with a fixed throughput (per\ntime step) capacity and a lifetime capacity defined by a total inventory\nsize. It offers its material as a single commodity. If a composition\nrecipe is specified, it provides that single material composition to\nrequesters. If unspecified, the source provides materials with the exact\nrequested compositions. The inventory size and throughput both default to\ninfinite. Supplies material results in corresponding decrease in\ninventory, and when the inventory size reaches zero, the source can provide\nno more material.\n",
+ "entity": "facility",
+ "name": "cycamore::Source",
+ "parents": [
+ "cyclus::Facility",
+ "cyclus::toolkit::CommodityProducer",
+ "cyclus::toolkit::Position"
+ ],
+ "vars": {
+ "inventory_size": {
+ "alias": "inventory_size",
+ "default": 1.000000000000000e+299,
+ "doc": "Total amount of material this source has remaining. Every trade decreases this value by the supplied material quantity. When it reaches zero, the source cannot provide any more material.",
+ "index": 2,
+ "range": [0.0, 1.000000000000000e+299],
+ "shape": [-1],
+ "tooltip": "inventory_size",
+ "type": "double",
+ "uilabel": "Initial Inventory",
+ "uitype": "range",
+ "units": "kg"
+ },
+ "latitude": {
+ "alias": "latitude",
+ "default": 0.0,
+ "doc": "Latitude of the agent's geographical position. The value should be expressed in degrees as a double.",
+ "index": 4,
+ "shape": [-1],
+ "tooltip": "latitude",
+ "type": "double",
+ "uilabel": "Geographical latitude in degrees as a double"
+ },
+ "longitude": {
+ "alias": "longitude",
+ "default": 0.0,
+ "doc": "Longitude of the agent's geographical position. The value should be expressed in degrees as a double.",
+ "index": 5,
+ "shape": [-1],
+ "tooltip": "longitude",
+ "type": "double",
+ "uilabel": "Geographical longitude in degrees as a double"
+ },
+ "outcommod": {
+ "alias": "outcommod",
+ "doc": "Output commodity on which the source offers material.",
+ "index": 0,
+ "shape": [-1],
+ "tooltip": "source output commodity",
+ "type": "std::string",
+ "uilabel": "Output Commodity",
+ "uitype": "outcommodity"
+ },
+ "outrecipe": {
+ "alias": "outrecipe",
+ "default": "",
+ "doc": "Name of composition recipe that this source provides regardless of requested composition. If empty, source creates and provides whatever compositions are requested.",
+ "index": 1,
+ "shape": [-1],
+ "tooltip": "name of material recipe to provide",
+ "type": "std::string",
+ "uilabel": "Output Recipe",
+ "uitype": "outrecipe"
+ },
+ "throughput": {
+ "alias": "throughput",
+ "default": 1.000000000000000e+299,
+ "doc": "amount of commodity that can be supplied at each time step",
+ "index": 3,
+ "range": [0.0, 1.000000000000000e+299],
+ "shape": [-1],
+ "tooltip": "per time step throughput",
+ "type": "double",
+ "uilabel": "Maximum Throughput",
+ "uitype": "range",
+ "units": "kg/(time step)"
+ }
+ }
+ },
+ ":cycamore:Storage": {
+ "all_parents": [
+ "cyclus::Agent",
+ "cyclus::Facility",
+ "cyclus::Ider",
+ "cyclus::StateWrangler",
+ "cyclus::TimeListener",
+ "cyclus::Trader",
+ "cyclus::toolkit::AgentManaged",
+ "cyclus::toolkit::CommodityProducer",
+ "cyclus::toolkit::Position"
+ ],
+ "doc": "Storage is a simple facility which accepts any number of commodities and holds them for a user specified amount of time. The commodities accepted are chosen based on the specified preferences list. Once the desired amount of material has entered the facility it is passed into a 'processing' buffer where it is held until the residence time has passed. The material is then passed into a 'ready' buffer where it is queued for removal. Currently, all input commodities are lumped into a single output commodity. Storage also has the functionality to handle materials in discrete or continuous batches. Discrete mode, which is the default, does not split or combine material batches. Continuous mode, however, divides material batches if necessary in order to push materials through the facility as quickly as possible.",
+ "entity": "facility",
+ "name": "storage::Storage",
+ "parents": [
+ "cyclus::Facility",
+ "cyclus::toolkit::CommodityProducer",
+ "cyclus::toolkit::Position"
+ ],
+ "vars": {
+ "discrete_handling": {
+ "alias": "discrete_handling",
+ "default": false,
+ "doc": "Determines if Storage will divide resource objects. Only controls material handling within this facility, has no effect on DRE material handling. If true, batches are handled as discrete quanta, neither split nor combined. Otherwise, batches may be divided during processing. Defaults to false (continuous)",
+ "index": 7,
+ "shape": [-1],
+ "tooltip": "Bool to determine how Storage handles batches",
+ "type": "bool",
+ "uilabel": "Batch Handling"
+ },
+ "in_commod_prefs": {
+ "alias": ["in_commod_prefs", "val"],
+ "default": [],
+ "doc": "preferences for each of the given commodities, in the same order. Defaults to 1 if unspecified",
+ "index": 1,
+ "range": [null, [1.000000000000000e-299, 1.000000000000000e+299]],
+ "shape": [-1, -1],
+ "tooltip": ["in_commod_prefs", ""],
+ "type": ["std::vector", "double"],
+ "uilabel": ["In Commodity Preferences", ""],
+ "uitype": ["oneormore", "range"]
+ },
+ "in_commods": {
+ "alias": ["in_commods", "val"],
+ "doc": "commodities accepted by this facility",
+ "index": 0,
+ "shape": [-1, -1],
+ "tooltip": ["input commodity", ""],
+ "type": ["std::vector", "std::string"],
+ "uilabel": ["Input Commodities", ""],
+ "uitype": ["oneormore", "incommodity"]
+ },
+ "in_recipe": {
+ "alias": "in_recipe",
+ "default": "",
+ "doc": "recipe accepted by this facility, if unspecified a dummy recipe is used",
+ "index": 3,
+ "shape": [-1],
+ "tooltip": "input recipe",
+ "type": "std::string",
+ "uilabel": "Input Recipe",
+ "uitype": "inrecipe"
+ },
+ "inventory": {
+ "index": 8,
+ "shape": [-1, -1],
+ "tooltip": "Incoming material buffer",
+ "type": ["cyclus::toolkit::ResBuf", "cyclus::Material"]
+ },
+ "latitude": {
+ "alias": "latitude",
+ "default": 0.0,
+ "doc": "Latitude of the agent's geographical position. The value should be expressed in degrees as a double.",
+ "index": 12,
+ "shape": [-1],
+ "tooltip": "latitude",
+ "type": "double",
+ "uilabel": "Geographical latitude in degrees as a double"
+ },
+ "longitude": {
+ "alias": "longitude",
+ "default": 0.0,
+ "doc": "Longitude of the agent's geographical position. The value should be expressed in degrees as a double.",
+ "index": 13,
+ "shape": [-1],
+ "tooltip": "longitude",
+ "type": "double",
+ "uilabel": "Geographical longitude in degrees as a double"
+ },
+ "max_inv_size": {
+ "alias": "max_inv_size",
+ "default": 1.000000000000000e+299,
+ "doc": "the maximum amount of material that can be in all storage buffer stages",
+ "index": 6,
+ "range": [0.0, 1.000000000000000e+299],
+ "shape": [-1],
+ "tooltip": "maximum inventory size (kg)",
+ "type": "double",
+ "uilabel": "Maximum Inventory Size",
+ "uitype": "range",
+ "units": "kg"
+ },
+ "out_commods": {
+ "alias": ["out_commods", "val"],
+ "doc": "commodity produced by this facility. Multiple commodity tracking is currently not supported, one output commodity catches all input commodities.",
+ "index": 2,
+ "shape": [-1, -1],
+ "tooltip": ["output commodity", ""],
+ "type": ["std::vector", "std::string"],
+ "uilabel": ["Output Commodities", ""],
+ "uitype": ["oneormore", "outcommodity"]
+ },
+ "processing": {
+ "index": 11,
+ "shape": [-1, -1],
+ "tooltip": "Buffer for material still waiting for required residence_time",
+ "type": ["cyclus::toolkit::ResBuf", "cyclus::Material"]
+ },
+ "ready": {
+ "index": 10,
+ "shape": [-1, -1],
+ "tooltip": "Buffer for material held for required residence_time",
+ "type": ["cyclus::toolkit::ResBuf", "cyclus::Material"]
+ },
+ "residence_time": {
+ "alias": "residence_time",
+ "default": 0,
+ "doc": "the minimum holding time for a received commodity (timesteps).",
+ "index": 4,
+ "range": [0, 12000],
+ "shape": [-1],
+ "tooltip": "residence time (timesteps)",
+ "type": "int",
+ "uilabel": "Residence Time",
+ "uitype": "range",
+ "units": "time steps"
+ },
+ "stocks": {
+ "index": 9,
+ "shape": [-1, -1],
+ "tooltip": "Output material buffer",
+ "type": ["cyclus::toolkit::ResBuf", "cyclus::Material"]
+ },
+ "throughput": {
+ "alias": "throughput",
+ "default": 1.000000000000000e+299,
+ "doc": "the max amount that can be moved through the facility per timestep (kg)",
+ "index": 5,
+ "range": [0.0, 1.000000000000000e+299],
+ "shape": [-1],
+ "tooltip": "throughput per timestep (kg)",
+ "type": "double",
+ "uilabel": "Throughput",
+ "uitype": "range",
+ "units": "kg"
+ }
+ }
+ }
+ },
+ "schema": {
+ ":agents:KFacility": "\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n",
+ ":agents:NullInst": "\n",
+ ":agents:NullRegion": "\n",
+ ":agents:Predator": "\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n",
+ ":agents:Prey": "\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n",
+ ":agents:Sink": "\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n",
+ ":agents:Source": "\n \n \n \n \n \n \n \n \n \n \n \n\n",
+ ":cycamore:DeployInst": "\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n",
+ ":cycamore:Enrichment": "\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 0\n 1\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n",
+ ":cycamore:FuelFab": "\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n",
+ ":cycamore:GrowthRegion": "\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n",
+ ":cycamore:ManagerInst": "\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n",
+ ":cycamore:Mixer": "\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n",
+ ":cycamore:Reactor": "\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n",
+ ":cycamore:Separations": "\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n",
+ ":cycamore:Sink": "\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n",
+ ":cycamore:Source": "\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n",
+ ":cycamore:Storage": "\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n"
+ },
+ "specs": [
+ ":agents:KFacility",
+ ":agents:NullInst",
+ ":agents:NullRegion",
+ ":agents:Predator",
+ ":agents:Prey",
+ ":agents:Sink",
+ ":agents:Source",
+ ":cycamore:DeployInst",
+ ":cycamore:Enrichment",
+ ":cycamore:FuelFab",
+ ":cycamore:GrowthRegion",
+ ":cycamore:ManagerInst",
+ ":cycamore:Mixer",
+ ":cycamore:Reactor",
+ ":cycamore:Separations",
+ ":cycamore:Sink",
+ ":cycamore:Source",
+ ":cycamore:Storage"
+ ]
+}
diff --git a/neams/templates/DeployInst.tmpl b/workbench/templates/DeployInst.tmpl
similarity index 100%
rename from neams/templates/DeployInst.tmpl
rename to workbench/templates/DeployInst.tmpl
diff --git a/neams/templates/Enrichment.tmpl b/workbench/templates/Enrichment.tmpl
similarity index 100%
rename from neams/templates/Enrichment.tmpl
rename to workbench/templates/Enrichment.tmpl
diff --git a/neams/templates/FuelFab.tmpl b/workbench/templates/FuelFab.tmpl
similarity index 100%
rename from neams/templates/FuelFab.tmpl
rename to workbench/templates/FuelFab.tmpl
diff --git a/neams/templates/GrowthRegion.tmpl b/workbench/templates/GrowthRegion.tmpl
similarity index 100%
rename from neams/templates/GrowthRegion.tmpl
rename to workbench/templates/GrowthRegion.tmpl
diff --git a/neams/templates/KFacility.tmpl b/workbench/templates/KFacility.tmpl
similarity index 100%
rename from neams/templates/KFacility.tmpl
rename to workbench/templates/KFacility.tmpl
diff --git a/neams/templates/ManagerInst.tmpl b/workbench/templates/ManagerInst.tmpl
similarity index 100%
rename from neams/templates/ManagerInst.tmpl
rename to workbench/templates/ManagerInst.tmpl
diff --git a/neams/templates/Mixer.tmpl b/workbench/templates/Mixer.tmpl
similarity index 100%
rename from neams/templates/Mixer.tmpl
rename to workbench/templates/Mixer.tmpl
diff --git a/workbench/templates/NullInst.tmpl b/workbench/templates/NullInst.tmpl
new file mode 100644
index 0000000..f09919d
--- /dev/null
+++ b/workbench/templates/NullInst.tmpl
@@ -0,0 +1 @@
+NullInst= null
\ No newline at end of file
diff --git a/workbench/templates/NullRegion.tmpl b/workbench/templates/NullRegion.tmpl
new file mode 100644
index 0000000..43e152f
--- /dev/null
+++ b/workbench/templates/NullRegion.tmpl
@@ -0,0 +1 @@
+NullRegion= null
\ No newline at end of file
diff --git a/neams/templates/Predator.tmpl b/workbench/templates/Predator.tmpl
similarity index 100%
rename from neams/templates/Predator.tmpl
rename to workbench/templates/Predator.tmpl
diff --git a/neams/templates/Prey.tmpl b/workbench/templates/Prey.tmpl
similarity index 100%
rename from neams/templates/Prey.tmpl
rename to workbench/templates/Prey.tmpl
diff --git a/neams/templates/Reactor.tmpl b/workbench/templates/Reactor.tmpl
similarity index 100%
rename from neams/templates/Reactor.tmpl
rename to workbench/templates/Reactor.tmpl
diff --git a/neams/templates/Separations.tmpl b/workbench/templates/Separations.tmpl
similarity index 100%
rename from neams/templates/Separations.tmpl
rename to workbench/templates/Separations.tmpl
diff --git a/neams/templates/Sink.tmpl b/workbench/templates/Sink.tmpl
similarity index 100%
rename from neams/templates/Sink.tmpl
rename to workbench/templates/Sink.tmpl
diff --git a/neams/templates/Source.tmpl b/workbench/templates/Source.tmpl
similarity index 100%
rename from neams/templates/Source.tmpl
rename to workbench/templates/Source.tmpl
diff --git a/neams/templates/Storage.tmpl b/workbench/templates/Storage.tmpl
similarity index 100%
rename from neams/templates/Storage.tmpl
rename to workbench/templates/Storage.tmpl
diff --git a/neams/templates/init_template.tmpl b/workbench/templates/init_template.tmpl
similarity index 100%
rename from neams/templates/init_template.tmpl
rename to workbench/templates/init_template.tmpl