From ad7bcbf35742bf8d68442f074cf7b69c47d4d171 Mon Sep 17 00:00:00 2001 From: nh916 Date: Sat, 12 Aug 2023 11:33:50 -0700 Subject: [PATCH] CRIPT Python SDK 2.0 First Release (#272) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * updated read me * added CRIPT logo * updated python badge to python 3.7 instead of python 3.9 * added a section that "we invite contribution" * Create .gitignore ignoring common files and directories that are unneeded * added issue templates * Update README.md * removed MANIFEST from the .gitignore list * Update .gitignore * removed smartphone section * Update .gitignore added mypy cache to be ignored * created setup.py and setup.cfg * Create pyproject.toml * setup documentation mkdocs.yaml file * Create requirements_docs.txt * Create extra.css * Create index.md brought it over from the old sdk docs * added .pytest_cache to .gitignore file * Create pull_request_template.md * moved pyproject.toml from src/ to the project root directory * made nodes/ a package * added supporting nodes * file.py * group.py * user.py * added primary_node.py * added the class for all primary nodes * renamed primary_node.py to primary_base_node.py to make it clearer and more self documenting * added a shell of all subobjects * reformatted Primary and Supporting Nodes * updated PR template * Make Cript installable via `pip` (#16) * make cript installable * add install test line * explicitly request python3 in tests * add missing nodes * fixing init files * add missing file * simplify test * fix imports * add missing nodes * fixing init files * simplify test * created primary_node.py * added str method to primary_node.py * added str method to primary_node.py * working on the abstract primary_node.py * added all fields and default values to primary_base_node.py * updated primary_base_node.py * updated __str__ method * updated __str__ method * making URL and UID into string types only if there is no URL/UID then the URL/UID string is empty * updated docstring for base __str__ method * address requested changes * add getter properties * first draft * parameter works * add missing file * remove typo * remove empty nodes * add nice doc strings * add test to CI * bring primary node up to date * blackify * blackify * add mock validation function * respond to Navid's comments * removing old doc strings * explain None value for parameter unit * ensure correct setters * remove unnecessary json serialization * save money on tests * json deserialization works * add user friendly access to load nodes * finishing touches on json serialization * Add license and contributor tracking * reenable all tests * updated README.md with disclaimer that the SDK is still under development * Create LICENSE.md * updated readme * Create SECURITY.md * remove duplicate license * specify right license file * trunk for software quality (#34) * add trunk init * fix tech debt * reformat readme * fix CLI tests * fixed typo for `Dependencies: JSONSchema` * fixed typo for `Dependencies: Requests` and `Dependencies: PyTests` and `Dependencies: mypy` * Add Quantity implementation * add tests for quantity * Create codeql.yml # Description This workflow running on every PR is good to catch any security vulnerabilities early * Implement Software Subobject (#28) * temporary step * add tests for citation and reference * add new software node file * fix import * add missing url property * fix test for reference * Navid's comments * implement the software subobject * make better exceptions for
JSON * trunk code fixes * fix lines * some fixes * code style * autoformat * Implement removing children from a node (#23) * implement removing nodes fully * add tests * autoformat * Implement a local search for nodes (#24) * implement search of child nodes * add tests for search * add comments to implementation trick * add additional comments * fix code style * cycle protection in search * Create api (#18) * WIP API object init * WIP API * moved API into an api/ directory * added empty save, get_my_groups, get_my_projects * working on delete method to make it clean and intuitive * added empty PrimaryNode base class * wrote the delete method for the api class * Update .gitignore added mypy into ignored section * Create requirements.txt * created an empty file for Group and User * setup get_my_user( ) method for the API class * fixed grammar of the ConnectionError exception * added placeholders for project, user, and group nodes * added type hinting for return values * added type hinting to get_my_groups( ) * added type hinting to get_my_projects( ) * added type hinting to get_my_user( ) * setting up search method * added docstrings * added comment explaining _valid_search_modes.py * wrote _get_db_schema() method and it's working * added tests for api.py * moved api test into api directory * renamed api to test_api * reformatted everything with black * setting up tests for getting db schema * added typehint to a get_db_schema method * added setup for pypi pretty much brought over what existed in the old SDK and changed it where it made sense * converted docstrings from google to numpy as decided * removed the main function * added tests for api * reformatted setup.py with black * formatting tests * small formatting * added todo to api.py * updated setup.cfg based on develop branch * added docstrings and reformatted code * moved requirements * moved black and mypy from requirements.txt to requirements_dev.txt * updated mypy to newest version * formatted requirements.txt * renamed ConnectionError to CRIPTConnectionError because ConnectionError is a built-in error * put API directory into src/cript/api/ directory updated docstrings * raising CRIPTConnectionError * added error for InvalidVocabulary * updated requirements.txt * created functions for fetching and validating controlled vocabulary * created functions for fetching and validating controlled schema * caching host from API and changing classes as needed * moving delete to be after * added placeholder tests for api, vocab, and schema functions and class * updated docstrings for what search returns * changed paths to be from cript. * changed docstring to be PrimaryBaseNode * fixed repeated variable * added API to __init__.py * removing global host from api.py tests are passing and working fine * stripping ending "/" from host * fix code style * Cache api (#29) * Improve error messages * Cache global API and require context managers.
* Add env variables for API * added brief docstrings to the save method * added requested changes of delete method * changed host for schema_validation.py and vocabulary.py * updated test_vocab_and_schema.py * updated requirements_dev.txt * removed unused import * formatted test_vocab_and_schema.py * use access function for global cache API * fix json type confusion --------- Co-authored-by: Ludwig Schneider * Implementation of all remaining subobjects (#41) * implement property * test property * implement condition * implement tests for condition * implement ingredient * add dummy values for property test * add dummy values for condition test * implement equipment with test * implement computation forcefield * add computational forcefield * updated README.md python version changed Installation section to say it requires Python 3.7+ instead of 3.9+ previously * make a common base exception for CRIPT (#43) * make a common base exception for CRIPT * fix abc * Finished Supporting Nodes (#44) * added group constructor * added dummy validate method * reformatted with black * WIP group.py * added UneditableNodeError and UneditableAttributeError * added base attributes to core.py base class * finished setting up user.py * set up user tests * formatted with black * set up group node and test * added read only common attributes in the BaseNode class * added test_group_to_json to test_group.py * added user node and tests * set up file.py * update file node test file * added validation of vocabulary * optimized imports * updated test_group.py and test_file.py * updated test_file.py * changed users list in group to always be a list * updated user.py * added kwargs to user and group.py * updated comments * file passing tests * group node passing all tests * user node passing all tests * added kwargs to file constructor * all files formatted and working * changed type for type_ * removed UneditableNodeError removed the error since it is not being used * changed _verify_file_source scope * updated handling uploading of a file * updated file.py * updated file.py * small updates * updated test_file.py * updated test_file.py * autoformat with trunk * making requested changes for group.py * changed the docstrings to refer to the data model * changed dataclass List to be set to `field(default_factory=list)` * updated the docstrings for `@users.setter` * making requested changes for user.py * changed the docstrings to refer to the data model * changed dataclass List to be set to `field(default_factory=list)` * making requested changes for file.py * changed the docstrings to refer to the data model * making requested changes for user.py and group.py * calling `validate()` in the constructor * calling the base node validate method * autoformat with trunk * setting group.py raises AttributeError * removed setters for group.py * setting an attribute on group.py raises an `AttributeError`instead of a custom `UneditableAttributeError` * updated tests_group.py to work correctly with `AttributeError` * setting user.py raises AttributeError * removed setters for user.py * setting an attribute on user.py raises an `AttributeError`instead of a custom `UneditableAttributeError` * updated tests_user.py to work correctly with `AttributeError` * removed `UneditableAttributeError` * removed `UneditableAttributeError` import because it was an unused import in * group.py * user.py * test_group.py * test_user.py * edited docstrings for test_group.py and test_user.py * removed `UneditableAttributeError` from exceptions.py * wrote 
`serialize_file_to_json` test_file.py * changed pytest return fixture to be yield fixture so all tests can perform the same * wrote out test_serialize_file_to_json, and it is written correctly, but found a bug within the serializer * wrote test_group_node_json_roundtrip it is currently coming out wrong * updated test_group.py * updated test_user.py * wrote `test_user_serialization_and_deserialization` * changed `user_node` to be a yield fixture instead of a return fixture * autoformat with trunk * fix tests. And make type_ type again * updated test_group.py and test_user.py * tests are passing * user has a list of groups, that probably should be taken out because the new backend does not need it * updated test_group.py and test_user.py --------- Co-authored-by: Ludwig Schneider * Updated pull request template added a `Tests` section to encourage transparency about how the code was tested. The pull request template is not mandatory and is a suggestion, but I think this could be helpful * Redesign of the API (#49) * Allow manual connect options for Jupyter Notebooks. * remove manual caching of Vocab * reform how vocab and schema is represented * Fix tests, as they can't currently connect to back end * fix tests * --amend * load req. for testing * syntax * implements Navid's request of integrating vocab and schema into API * fix issues * Implement a cycle prevention (#42) * add cycle protection in validation * autoformat * better name for cycle detection * fix name everywhere * trunk formatted codeql.yml * fix trunk check * Finished primary nodes (#52) * added a setter for all list fields within project.py * added some docstrings, more docstrings are needed * added a small comment * started adding tests to Project node * optimized imports * added docstrings * tests * added project constructor and validate method * updated test_project.py * setup collection.py and created test_collection.py * added more to the __init__ method for collection.py * added link to the data model paper * set up experiment.py * set up test for test_collection.py * formatted with black experiment.py * set up tests for experiment node * added docstrings and formatting * set up inventory.py and test_inventory.py * updated and alphabetically ordered the __init__.py within primary_nodes * updated __init__.py for cript/ to include material * updated __init__.py for cript/ to include material * added condition to __init__.py src/cript/nodes/subobjects/__init__.py src/cript/nodes/__init__.py src/cript/__init__.py * setup process node with no tests yet * added constructor * setup data node * setup computation.py node * setup computational_process.py * setup reference node * updated collection.py * added **kwargs to the constructor * returning copies of the dataclass attribute * updated collection.py * added **kwargs to the constructor * returning copies of the dataclass attribute * updated computational_process.py * added **kwargs to the constructor * returning copies of the dataclass attribute * updated computational_process.py * set _json_attrs variable * added **kwargs to the constructor * returning copies of the dataclass attribute * updated collection.py and computational_process.py * set _json_attrs variable * updated experiment.py * set _json_attrs variable * added **kwargs to the constructor * returning copies of the dataclass attribute * updated experiment.py * set _json_attrs variable * added **kwargs to the constructor * returning copies of the dataclass attribute * updated process.py * set _json_attrs variable * added
**kwargs to the constructor * returning copies of the dataclass attribute * updated process.py * set _json_attrs variable * added **kwargs to the constructor * returning copies of the dataclass attribute * updated reference.py * set _json_attrs variable * added **kwargs to the constructor * returning copies of the dataclass attribute * setup material.py node * setup conftest.py * removed fixture from test_project.py and instead have it in conftest.py * set up the needed tests for the material node * set up the needed tests for the data node * set up test_reference.py * set up test_computation.py * updated test_data.py * updated test_collection.py * fixing imports * fixing circular import error in all the nodes. Currently typing most things as Any instead of proper node for a quick fix * add correct typings later * reference and software nodes are written as subojects instead of primary objects. * added prerequisite_processes field to process.py * simple material test passing correctly * added name attribute to material.py * importing computation_forcefield correctly in packages * updated process.py dataclass list defaults * updated dataclass to have default values of `field(default_factory=list)` instead of `None` * successfully running simple process test * fixed typo in material.py * test_create_simple_material running fine * test_all_getters_and_setters successful! * test_serialize_material_to_json successful! * test_serialize_material_to_json successful * created complex_material yield fixture * formatted test_material.py with black * test_deserialize_material_from_json filing added the test, but it is failing the assertions * test_create_inventory successful! * changed inventory material to `materials` to make it more self-documenting * temp fix * test_get_and_set_inventory successful! * reformatting * crude fix for back end providing too much data * updated test_data.py * updated test_data.py * importing references in all __init__.py * changed reference.py from type_ to type * changed computational_process.py from type_ to type * updated reference.py * set dataclass attributes from default None to field(default_factory=list) * url property had a typo * changed setter for authors parameter to be `new_authors` instead of `new_authors_list` * importing reference from primary nodes __init__.py * test_complex_process_node successful! * test_serialize_process_to_json fail! * formatted reference.py * all primary nodes use default field * updated primary nodes to use `field(default_factory=list)` instead of `None` * fix the remaining material problem. Inherit the JsonAttributes from the PrimaryBaseNode instead of BaseNode. * autoformat with trunk * updated test_reference.py * updated test_reference.py * fix default int values * autoformat with trunk * updated test_reference.py * updated test_reference.py type hinting * test_serialize_reference_to_json successful! * added _json_attrs to computational_process.py * test_create_simple_computational_process successful * added test_create_complex_computation_node to test_computation.py * test_create_simple_data_node successful * test_serialize_reference_to_json successful * creating a conftest.py with all nodes to reuse for all tests * typo in getter of data.py sample_preperation * test_create_simple_computational_process successful! 
* added `simple_quantity_node` & `simple_ingredient_node` fixtures * test_create_simple_computational_process successful * test_data.py tests successful * test_serialize_data_to_json successful * formatted test_data.py * fix software test * fix reference test * fix API import error * fix collection typo * fix a bunch of trunk error * fix faulty json error test * Remove def json (#51) * fix a bunch of trunk error * fix faulty json error test * only report json with sorted keys and remove defaults * make tests pass again * fix tests that didn't tolerate the changed json output * set default None value of list to an empty list * updated computation.py * updated conftest.py added software and software_configuration node * test_create_complex_computation_node successful! * updated data.py * test_serialize_computation_to_json successful * test_data_getters_and_setters successful! * dataclass inheriting from PrimaryBaseNode * optimized imports * test_serialize_process_to_json successful! * added `equipment` and `property` to conftest.py * fixed process.py prerequisite_processes * test_process.py passing * test_process.py added comment * fix user node * updated conftest.py * fixes for experiment * updated conftest.py * removed unused import * and removed unused copy * fix experiment None list * updated test_file.py * using file node fixture * and removed copy because comparing dicts is more accurate than comparing strings * updated test_reference.py * using simple_reference_node fixture * added test for getters and setters * formatted test_process.py * making base notes attribute editable * test_computation_getters_and_setters successful * optimized imports * added simple_inventory_node * test_inventory_serialization successful * test_create_complex_computational_process successful * fixed typo in computational_process.py * wrote test for test_create_complex_computational_process * added Integration tests to test_computational_process.py * formatted test_computational_process.py * fixed experiment name * added name to dataclass and set up getters and setters * experiment creation tests successful! * test_create_simple_experiment successful * test_create_experiment_full successful * added simple_experiment_node fixture * test_all_getters_and_setters_for_experiment successful! 
* updated test_experiment.py * updated docstrings * test_experiment_json successful * formatted test_experiment.py * added test to test_experiment.py * formatted conftest.py * test_create_simple_collection successful * added `simple_collection_node` to conftest.py * formatted test_experiment.py * test_collection_getters_and_setters successful * added complex_collection_node to fixtures * formatted conftest.py * conftest.py: added docstrings * test_serialize_collection_to_json successful * added integration tests * added comments * test_create_simple_project successful * formatted collection.py * added simple_project_node to conftest.py * updated project.py * removing `Files: List[File]` from `Project node` * Files are no longer a part of the Project node per the manuscript * removing getters, setters, from dataclass and constructor https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=8 * commented out `Group` nodes * `Groups` are not part of the Project node per the manuscript * removing getters, setters, from dataclass and constructor https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=8 * removed `_set_node_or_list()` as it is not needed * test_project_getters_and_setters successful * added fixtures to conftest.py * added complex_project_node * added complex_material_node * added simple_computation_forcefield * formatted conftest.py with black * test_material.py upadted and working fine * test_serialize_project_to_json successful * added integration tests for test_project.py * updated computational_process.py * updated data.py * updated test_data.py and test_experiment.py * updated inventory.py * inventory.py: added an if statement if the list attributes are provided as `None` * updated docstrings in reference.py * updated material.py and process.py updated if statement formatting in both files * all tests passing successfully * formatted process.py * test_serialize_computational_process_to_json successful * test_reference.py added TODO * updated test_project.py swapped out complex_project_node for simple_project_node to make reading it and seeing it easier * updated test_material.py * fixed the simple_material fixture to be function scoped instead of session scoped * swapped out complex_material_node for simple_material_node to make reading it and seeing it easier * updated test_experiment.py * using simple_experiment_node for serialization instead of complex_experiment_node to make reading it easier and catching errors easier * test_computation.py made expected node easier to read * fix import test_collection.py * updated test_complex_process_node * test_complex_reference successful * removed unused import `copy` * updated test_material.py * updated docstrings for inventory.py * fixing format for trunk * fixing format for trunk * added `name` and `notes` to primary_base_node.py constructor * added name and notes to computational_process.py * added name and notes to data.py * updated docstrings for computational_process.py * updated data.py dataclass * updated data.py constructor with `name` and `notes` * updated inventory.py constructor with `name` and `notes` * updated material.py constructor with `name` and `notes` * updated process.py constructor with `name` and `notes` * updated project.py constructor with `name` and `notes` * updated project.py with `name` and `notes` * added `name` and `notes` in the constructor * added group to project again * updated collection.py to inherit from 
PrimaryBaseNode dataclass * removed experiment.py name setter using the PrimaryBaseNode for name property and setter * added a setter for name in primary_base_node.py * formatted with black * updated docstrings for collection.py * made Project collection optional within constructor * adding Group instead of Any avoiding circular import * making notes within the constructor optional * material tests passing successfully * inventory tests passing successfully * added notes and name to computation.py * data tests passing successfully * computation tests passing successfully * computational_process tests passing successfully * process tests passing successfully * added name and notes to constructor * experiment tests passing * collection tests passing * project tests passing * formatted with black * updated test_material.py to use conftest.py test_material.py used to use own fixtures that lead to repeat code and is now instead using conftest.py * updated test_complex_process_node to use conftest fixtures * test_reference.py using conftest.py fixture * added todo to conftest.py * formatted conftest.py with black * added todo in conftest.py * added complex_data_node * updated complex_data_node fixture * had a small typo where it said "fucntion" instead of "function" * and the name had a typo that said "test_create_complex_data_node" instead of "complex_data_node" * optimizing import * formatted test_nodes_no_host.py with black * using isort for files * updated imports for tests_nodes_no_host.py * formatting and optimizing imports using isort and black * updated tests conftest.py * reformatted with black * reformatted with black * reformatted with black * updated test_node_util.py * trunk format * fix json error raise * gfix the API conftest to use yield --------- Co-authored-by: Ludwig Schneider * Python SDK Documentation Setup (#57) * formatted test_data.py * fix software test * fix reference test * fix API import error * fix collection typo * fix a bunch of trunk error * fix faulty json error test * Remove def json (#51) * fix a bunch of trunk error * fix faulty json error test * only report json with sorted keys and remove defaults * make tests pass again * fix tests that didn't tolerate the changed json output * set default None value of list to an empty list * updated computation.py * updated conftest.py added software and software_configuration node * test_create_complex_computation_node successful! * updated data.py * test_serialize_computation_to_json successful * test_data_getters_and_setters successful! * dataclass inheriting from PrimaryBaseNode * optimized imports * test_serialize_process_to_json successful! 
* added `equipment` and `property` to conftest.py * fixed process.py prerequisite_processes * test_process.py passing * test_process.py added comment * fix user node * updated conftest.py * fixes for experiment * updated conftest.py * removed unused import * and removed unused copy * fix experiment None list * updated test_file.py * using file node fixture * and removed copy because comparing dicts is more accurate than comparing strings * updated test_reference.py * using simple_reference_node fixture * added test for getters and setters * formatted test_process.py * making base notes attribute editable * test_computation_getters_and_setters successful * optimized imports * added simple_inventory_node * test_inventory_serialization successful * test_create_complex_computational_process successful * fixed typo in computational_process.py * wrote test for test_create_complex_computational_process * started on docs * started data, file, group documentation * started on mkdocs.yml * added Integration tests to test_computational_process.py * formatted test_computational_process.py * fixed experiment name * added name to dataclass and set up getters and setters * experiment creation tests successful! * test_create_simple_experiment successful * test_create_experiment_full successful * added simple_experiment_node fixture * test_all_getters_and_setters_for_experiment successful! * updated test_experiment.py * updated requirements_docs.txt * updated file.py * updated docstrings * test_experiment_json successful * formatted test_experiment.py * added test to test_experiment.py * formatted conftest.py * test_create_simple_collection successful * added `simple_collection_node` to conftest.py * formatted test_experiment.py * test_collection_getters_and_setters successful * added complex_collection_node to fixtures * formatted conftest.py * conftest.py: added docstrings * test_serialize_collection_to_json successful * added integration tests * added comments * test_create_simple_project successful * formatted collection.py * added simple_project_node to conftest.py * updated project.py * removing `Files: List[File]` from `Project node` * Files are no longer a part of the Project node per the manuscript * removing getters, setters, from dataclass and constructor https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=8 * commented out `Group` nodes * `Groups` are not part of the Project node per the manuscript * removing getters, setters, from dataclass and constructor https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=8 * removed `_set_node_or_list()` as it is not needed * test_project_getters_and_setters successful * added fixtures to conftest.py * added complex_project_node * added complex_material_node * added simple_computation_forcefield * formatted conftest.py with black * test_material.py upadted and working fine * test_serialize_project_to_json successful * added integration tests for test_project.py * updated computational_process.py * updated data.py * updated test_data.py and test_experiment.py * updated inventory.py * inventory.py: added an if statement if the list attributes are provided as `None` * updated docstrings in reference.py * updated material.py and process.py updated if statement formatting in both files * all tests passing successfully * formatted process.py * test_serialize_computational_process_to_json successful * test_reference.py added TODO * updated test_project.py swapped out 
complex_project_node for simple_project_node to make reading it and seeing it easier * updated test_material.py * fixed the simple_material fixture to be function scoped instead of session scoped * swapped out complex_material_node for simple_material_node to make reading it and seeing it easier * updated test_experiment.py * using simple_experiment_node for serialization instead of complex_experiment_node to make reading it easier and catching errors easier * test_computation.py made expected node easier to read * added all subobjects to documentation * added primary_nodes to documentation * fix import test_collection.py * updated test_complex_process_node * test_complex_reference successful * removed unused import `copy` * updated test_material.py * updated docstrings for inventory.py * fixing format for trunk * fixing format for trunk * added CRIPT logo to home page and site logo * testing out different docstring styles * Google docstrings in file.py * added JSON to file.md * numpy docstrings in users.py * pep docstrings in group.py * worked on data.py documentation * updated file.py documentation * updated file.py with docstrings * added faq.md * added exceptions.md * updated mkdocs.yml * reordered pages * updated group documentation * updated group documentation * added JSON representation * updated group documentation * updated user node documentation * added definition for the user node * updated group documentation * updated group documentation * updated user documentation * updated user documentation * updated user documentation * updated docstrings type * updated file documentation * updated group documentation * updated user documentation * added description to material node * updated material documentation * updated material documentation * updated material documentation * updated material documentation * added `name` and `notes` to primary_base_node.py constructor * added name and notes to computational_process.py * added name and notes to data.py * updated docstrings for computational_process.py * updated data.py dataclass * updated data.py constructor with `name` and `notes` * updated inventory.py constructor with `name` and `notes` * updated material.py constructor with `name` and `notes` * updated process.py constructor with `name` and `notes` * updated project.py constructor with `name` and `notes` * updated project.py with `name` and `notes` * added `name` and `notes` in the constructor * added group to project again * updated collection.py to inherit from PrimaryBaseNode dataclass * removed experiment.py name setter using the PrimaryBaseNode for name property and setter * added a setter for name in primary_base_node.py * formatted with black * updated docstrings for collection.py * made Project collection optional within constructor * adding Group instead of Any avoiding circular import * making notes within the constructor optional * material tests passing successfully * inventory tests passing successfully * added notes and name to computation.py * data tests passing successfully * computation tests passing successfully * computational_process tests passing successfully * process tests passing successfully * added name and notes to constructor * experiment tests passing * collection tests passing * project tests passing * formatted with black * updated test_material.py to use conftest.py test_material.py used to use own fixtures that lead to repeat code and is now instead using conftest.py * updated test_complex_process_node to use conftest fixtures * 
test_reference.py using conftest.py fixture * added todo to conftest.py * formatted conftest.py with black * added todo in conftest.py * added cript * updated documentation for some attributes and definition updated documentation for following attributes * `type` * `citations` * `process` * `files` * added complex_data_node * adding complex data node as an example * added minimal and maximal data node * added reminder comment to later add developer documentation * moving navigation to the top of mkdocs.yml * updated complex_data_node fixture * had a small typo where it said "fucntion" instead of "function" * and the name had a typo that said "test_create_complex_data_node" instead of "complex_data_node" * updated requirements_docs.txt * updated documentation for file.py * updated mkdocs.yml * updated file.py * optimizing import * formatted test_nodes_no_host.py with black * using isort for files * updated imports for tests_nodes_no_host.py * formatting and optimizing imports using isort and black * updated tests conftest.py * reformatted with black * reformatted with black * reformatted with black * updated test_node_util.py * updated requirements_docs.txt * trunk format * fix json error raise * gfix the API conftest to use yield * mkdocs.yml watching `src/` * telling `mkdocstrings` to watch `src/` for changes * file node docs done * updated project.py collections property docs updated docstrings for collections property within project.py * added docs for project.Material * changing Material -> Materials like in datamodel * project node docs done * added data model link to project * removed `required column` in docs * added docstrings to collections.citations * added docstrings to collections.cript_doi * added docstrings to collections cript_doi and inventories * updated collection's inventory docs * updated collection's experiments docs * updated collections constructor docs * removed validate from collection.py as it is unneeded * update wording for collections constructor * updated definition docstring for collection.py * added definition docstrings to experiment.py * updated experiment.py constructor docs * updated experiment.py constructor docs * updated experiment.py process docs * updated experiment.py funding docs * updated experiment.py process docs with example * updated experiment.py data docs * updated experiment.py computations docs * updated experiment.py computational_process docs * updated experiment.py citations docs * updated inventory.py docs definition * updated inventory.py constructor docs * updated inventory.py constructor docs * updated inventory.py constructor docs * updated material docs attributes section * finished first draft of material node docs * finished first draft of process node docs * updated process.py docs * updated process.py docs * merging material from dev to docs and keeping docs * updated data docs after merge conflict fix * updated experiment.py docs after merge conflict fix * updated material.py for test success! all tests successful! 
* updated experiment.py docs * updated project.py docs * updated experiment.py.py docs * updated docs material * finished docs first draft computation.py * added description to computation.py * added comment to TODO * finished computational_process.py docs first draft * finished computational_process.py docs first draft * finished reference docs first draft * added definition section added definition section computational_process.py and computation.py * updated navigation in mkdocs.yml removed user node from primary nodes as it is under Supporting Nodes * basic auto format * fix not auto format issues --------- Co-authored-by: Ludwig Schneider * JSON node: [node type] (#71) * make node field a list * implementation of the change * adjust tests to {'node': ['asdf']} * add test for wrong node specification * created docs CI/CD (#69) * created docs CI/CD * added comment and shortened file name * updated .gitignore to ignore coverage.py (#68) * updated .gitignore to ignore coverage.py * updated .gitignore to ignore coverage.py * created simple issue template (#64) * created simple issue template I keep finding myself not needing the other two issue templates, and I keep making my own. I think this is simplified and can help a lot! * auto format --------- Co-authored-by: Ludwig Schneider * Introduce UID management to nodes and JSON (#72) * first failing attempt * first support for uids * remove reference subobject * support uid for nodes * support return of handled ids, when json is generated. * make primary nodes comply * update conftest.py with subobjects * add missing files * more mising files * get a few more tests passing * add more subobject tests * almost all test back in and passing * resolve trunk issues * fix import * change uid prefix * make a uid for every node * remove cycle prevention * implement deepcopy handling for uid's * merge hell, getting tests ready again * making progress * getting uid serialization right * fixed problem where all dictionaries ought to be nodes * change cycle test to nodes, that actually can have cycles. 
* make deepcopy an actual deep copy * make conftest less of a beast * missing files * renaming complete simple and complex * mend * Create CI/CD of dependency security scanner (#60) * Create dependency-review.yml * trunk formatted dependency-review.yml with * fix formatting --------- Co-authored-by: Ludwig Schneider * renaming GitHub workflow to tests.yml (#79) * renaming workflow to tests.yml * renaming workflow to tests * Wip api (#59) * setup paginator.py * Update paginator.py * moving `__enter__` and `__exit__` to the top * moving connect and disconnect at top it makes more sense logically for connect and disconnect to be near `__enter__` and `__exit__` * updated comments for `_load_controlled_vocabulary` * updated test_api.py * updated docstrings and made them simpler and removed returns and parameter * added cript_api as parameter to all api tests * added more tests that were needed such as * `test_get_db_schema_from_api` * `test_get_controlled_vocabulary_from_api` * `test_api_search_material_by_uuid` * `test_api_material_exact_search` * added `TODO` for `cript_api` fixture to use from conftest.py * updated test_api.py with typehints * updated test_api.py added `test_create_api` * updated test_api.py added 2 tests: * test_get_my_group_node_from_api * test_get_my_projects_from_api * worked on getting full vocabulary from API * renamed `_load_controlled_vocabulary` to `_get_and_set_vocab` * worked on `_get_and_set_vocab` to api.py * initialized `_vocabulary` with empty dict * added `test_api_http_warning` test_api.py * updated api.py to get vocabulary from cache if exists * fixed _get_and_set_vocab cache issue * updated test_get_db_schema_from_api * test_get_db_schema_from_api is successful * updated `get_vocabulary` method and removed test for it * added `test_is_vocab_valid` and added Example to `_get_and_set_vocab` * test_is_vocab_valid successful * renamed `_get_and_set_vocab` to `get_vocab` * The user can use the same method to get the vocabulary as it is used by the program * less redundancy and less code to maintain * Tests passing successfully * updated `InvalidVocabulary` to work correctly * all tests currently passing * successfully getting `db_schema` test_get_db_schema_from_api passing successfully * worked on `test_get_db_schema_from_api` * wrote `_get_db_schema` method * renamed `_schema` to `_db_schema` in api.py * renamed `_load_db_schema()` to `_get_db_schema()` * removing TODO for API versioning * will just use `v1` for now and will optimize later * formatted exceptions.py with black * test_get_db_schema_from_api successful * wrote `test_get_db_schema_from_api` * `test_get_db_schema_from_api` is successfully written * wrote `test_is_node_schema_valid` * `test_api_with_invalid_host` is successful * `test_is_node_schema_valid` is failing * Update api.py added TODO * seperated vocabulary categories * seperated out vocabulary categories into vocabulary_categories.py to keep the code cleaner * refactored the api.py file to use vocabulary_categories.py * reformatted api.py * reformatted with black * reformatted and changed function position in test_api.py * reformatted api.py moving property to top of class * test_is_node_schema_valid fail * created CRIPTAPISaveError for when the node cannot be saved This exception can be used when the user tries to save their node to the API and the API responds with an http status code that is not `200` This error code tries to give good errors to the user for easy debugging * commenting out install.yml for now * working on api.py `save` 
method * commented out delete method * added comments to test_api.py * test_api.py formatted with black * formatted with black `src/cript/api/exceptions.py` * filled in more of the save method formatted with black `src/cript/api/api.py` * updated requirements_dev.txt updated black and mypy versions added coverage.py * updated valid search modes to be what the API has available * stripping host of spaces * created InvalidSearchModeError * added docstrings and formatted api/exceptions.py * formatted _valid_search_modes.py * added `_prepare_host` method for cleaning host * added `_prepare_host` method for cleaning host * worked on `search` method * added docstrings * added if statement and started making query * made `_prepare_host` into a function instead of method * formatted api.py with black * updated test_api.py * reading token from .env file * wrote function for `test_prepare_host()` * removed some separate test functions: * `test_api_search_material_by_uuid()` * `test_api_search_material_by_url()` * `test_api_material_exact_search()` * working on search function * converted enum to list of strings * added test comments to test_api.py * using SearchModes as enum * added `_get_valid_search_modes()` to api/exceptions.py updated exceptions.py for InvalidSearchModeError * no change * search method is done! * formatted with black api.py and valid_search_modes.py * changed available search modes in InvalidSearchModeError * updated InvalidSearchModeError error message * updated exceptions.py * small updates to api.py and test_api.py * test_prepare_host successful * stripping url correctly * current_page_number is a property with getters and setters * added `fetch_page_from_api()` method * using url directly as given without the page parameter without needing to prepare it * refactored paginator name * instead of `paginator.next()` it will be `paginator.next_page()` and `paginator.previous_page()` * formatted paginator.py with black * finished paginator but no tests yet * updated save method to take a Project node instead of primary node * host must start with http or https * added a line for http or https within _prepare_host() * added an exception for host without http in the name * tested host exception * api host putting "/api/v1" inside of host variable * removing "/api/v1" in other areas to keep the code clean and DRY * get_vocab * _get_db_schema * save * created a method to check API connection and put place holders * putting check for api connect towards the end of the init function * cleaning up _prepare_host assignment to self.host * updated * updated `CRIPTAPISaveError` to be more readable and better UX * cript_api.save working and testing correctly * added search modes to lib and working on search and paginator * added SearchModes to library to be easily found * working on search with paginator, but not done yet * allowing value_to_search to be None because an empty string just doesn't make sense when writing it * giving paginator http header instead of token * removed TODO comment because it is not needed and can be bad design * save is reset and ready for a real node * search and paginator working correctly * added docstrings to paginator.py constructor * api.save() is using Project type hinting correctly and there are no more circular import errors * optimized imports and removed unused imports for api * added an else that raises an error * reordered arguments for paginator.py because it makes more sense * search and search_exact I think are doing okay * putting unsupported 
methods at the end of api.py * added comments for tests * remove print statement * updated `is_node_schema_valid()` api method * updated `is_node_schema_valid()` api method started but not tested yet * worked on CRIPTNodeSchemaError * reformatted with black * merging develop into wip_api * added .converage to .gitignore * merged `develop` into `wip_api` * formatted with black * added node_type property and save is sending correct request to the server * renamed test project renamed `test_api_save_material` to `test_api_save_project` since we can only save projects now * added `SearchModes` and `ExactSearchModes` to package * removed `get_vocab()` from __init__ to speed up * api class was very slow because on every api initialization it would get the entire controlled vocabulary when it did not need to do it on initialization and could do it as it was needed * api search working successfully * formatted api with black * formatted test_api.py with black * search working well, but need to change all classes * search working well, but need to change all classes * added `node` to all dataclass of all nodes * added `node` field to all nodes dataclass to be used when needed by API.search() * renamed enum from `EXACT_NAME` to `NAME` * exact search has problem because API answers are not uniform * formatted material.py with black * search working fully and well! -------------------------------------------- Success: * search works with node type * search works with contains name * search works with exact name * search works with UUID -------------------------------------------- Notes: * removed `search_exact` because not needed * removed `ExactSearch` enum because not needed anymore. `api.search` handles it all * broke up tests for search into different search mode tests * always passing in a page number of 0 because it seems to have no effect on the API, but API docs specify certain places that take page numbers * paginator currently not raising `InvalidPageRequest` even though in docstrings says that it does -------------------------------------------- Future: * Need to add exception handling for a lot of it * Exceptions need to be nice CRITP exceptions * formatted with black * is_node_schema_valid is working correctly! * is_node_schema_valid is working correctly! * formatted with black * tests passing correctly * `test_prepare_host` successful! * `test_get_db_schema_from_api` successful! 
notes: * save project not passing anymore because I changed the save method within the API, I will have to come check that later * removed `"node": "material"` from all nodes dataclass * Add schema validation to node design * update api save to test with simple_project_node * Use class name for `node` attribute (#74) * use type(self).__name__ for node * add magic to be able to use node_type for classes and instances to get the node type --------- Co-authored-by: nh916 * removing api methods unsupported by API * delete() * get_my_user() * get_my_groups() * get_my_projects() * fixed vocab that was missing return statement * remove wrong node_type * formatted with black * added install.yml again * formatted with trunk * optimized imports and removed unused variable * formatted with trunk * added rough documentation for API * changing variable names to what data model stated * Project: had materia**s** and data model specified material * Experiment: had citation and the data model specified citation**s** * formatted docs/api.md with trunk * changed `check_initial_host_connection` to private * refactoring and renaming * api instance is private `_is_vocab_valid` and `is_node_schema_valid` all tests are passing except 2 1. `api.save()` because of deserialization 2. `api.search()` I assume because there are changes happening to the API * refactoring and renaming * api instance is private `_is_vocab_valid` and `is_node_schema_valid` all tests are passing except 2 1. `api.save()` because of deserialization 2. `api.search()` I assume because there are changes happening to the API * updated faq.md with more example code * updated api documentation * formatted examples docstrings within api search * added docstrings to Search Modes * docstrings formatting * added rough draft documentation of paginator * removed schema and vocabulary.py functions * having the schema and vocabulary methods within the global API * formatted api with black * formatted faq.md * formatted with trunk * updated FAQ * added more content * trunk formatted * separated out the Exceptions more work is needed to get these to be readable and easy to use, but it is a good start for now * trunk formatted documentation `.md` files * removing unused documentation file * renaming `get_vocab` to `fetch_vocab` * fix test_node_util * change token management and add dummy token * fix experiment * fix api __enter__ * also make CRIPT_HOST a token * refine node expectation for api check * test api with env variables only (we don't have token and host ready otherwise) * removed test_vocab_and_schema.py * the vocab and schema are no longer needed as the tests are captured within test_api.py * fixed error for api * api tests passing except for save * commenting out the `api.save()` test * added exception handling to getting the db_schema * renaming `fetch_vocab` to `get_vocab` this way it stays consistent with `get_db` * updated TODO * removing developer documentation from docs navbar * fixed documentation issues for collection.md * fixed docstrings documentation link * fixed documentation warnings for reference and mkdocs.yml * commenting out api search tests for github * trunk formatted files * optimized imports * try * windows test * typo * remove windows test --------- Co-authored-by: Ludwig Schneider * Check for orphraned nodes (#82) * first draft to check for orphaned nodes * simplify logic * rename variables * add material inventory check * add specific exceptions and helper functions * fix materials_list error * add test to check the orphan 
nodes detection * added more documentation to our FAQ section (#86) * added more documentation to our FAQ section * formatted with trunk * Fix API Exceptions and Write Documentation (#85) * updated `CRIPTAPIRequiredError` * renamed it from `CRIPTAPIAccessError` to `CRIPTAPIRequiredError` to make it more self documenting * added docstrings/documentation for it * fixed it properly in the code * update docstrings for `CRIPTAPISaveError` * cleaned up `InvalidVocabulary` error * cleaned up `InvalidVocabularyCategory` error formatted the code as well with black * updated `InvalidHostError` * removed `InvalidSearchModeError` class * optimized imports for api/exceptions.py * wrote docs and cleaned up `CRIPTConnectionError` * added docstrings for `InvalidHostError` * added documentation for `APIError` * updated docstrings for `CRIPTAPIRequiredError` * formatted api/exceptions.py with black * removed all method docstring for cleaner documentation * changed wording on documentation * updated documentation * updating db schema test because db schema updated (#90) * updated tests and trunk workflow (#103) * updated packages (#100) * updated packages * updated packages * Update README.md (#112) * added documentation link to it * added wiki link to it * Created CONTRIBUTING.md (#116) * Create CONTRIBUTING.md * trunk format --------- Co-authored-by: Ludwig Schneider * created docs_ci_check * Update requirements_dev.txt added `pytest==7.3.1` to requirements_dev.txt as it was missing previously * PR template updated (#99) * Created Test coverage CI (#140) * added `pytest-cov` to `requirements_dev.txt` * wrote test_coverage.yaml * added trigger to run test_coverage.yaml manually * update requirements_dev.txt mypy package * formatted test_coverage.yaml put on block within a nicer list instead of empty blocks * added matrix OS and Python Version * using matrix of ubuntu and only Python 3.7 and 3.11 having a lot of checks for coverage is probably unneeded because all coverage will likely be the same, so just testing on ubuntu will the min and max we support is probably more than enough * Validate nodes (#78) * api host putting "/api/v1" inside of host variable * removing "/api/v1" in other areas to keep the code clean and DRY * get_vocab * _get_db_schema * save * created a method to check API connection and put place holders * putting check for api connect towards the end of the init function * cleaning up _prepare_host assignment to self.host * updated * updated `CRIPTAPISaveError` to be more readable and better UX * cript_api.save working and testing correctly * added search modes to lib and working on search and paginator * added SearchModes to library to be easily found * working on search with paginator, but not done yet * allowing value_to_search to be None because an empty string just doesn't make sense when writing it * giving paginator http header instead of token * removed TODO comment because it is not needed and can be bad design * save is reset and ready for a real node * search and paginator working correctly * added docstrings to paginator.py constructor * api.save() is using Project type hinting correctly and there are no more circular import errors * optimized imports and removed unused imports for api * added an else that raises an error * reordered arguments for paginator.py because it makes more sense * search and search_exact I think are doing okay * putting unsupported methods at the end of api.py * added comments for tests * remove print statement * updated `is_node_schema_valid()` api 
method * updated `is_node_schema_valid()` api method started but not tested yet * worked on CRIPTNodeSchemaError * reformatted with black * merging develop into wip_api * added .converage to .gitignore * merged `develop` into `wip_api` * formatted with black * added node_type property and save is sending correct request to the server * renamed test project renamed `test_api_save_material` to `test_api_save_project` since we can only save projects now * added `SearchModes` and `ExactSearchModes` to package * removed `get_vocab()` from __init__ to speed up * api class was very slow because on every api initialization it would get the entire controlled vocabulary when it did not need to do it on initialization and could do it as it was needed * api search working successfully * formatted api with black * formatted test_api.py with black * search working well, but need to change all classes * search working well, but need to change all classes * added `node` to all dataclass of all nodes * added `node` field to all nodes dataclass to be used when needed by API.search() * renamed enum from `EXACT_NAME` to `NAME` * exact search has problem because API answers are not uniform * formatted material.py with black * search working fully and well! -------------------------------------------- Success: * search works with node type * search works with contains name * search works with exact name * search works with UUID -------------------------------------------- Notes: * removed `search_exact` because not needed * removed `ExactSearch` enum because not needed anymore. `api.search` handles it all * broke up tests for search into different search mode tests * always passing in a page number of 0 because it seems to have no effect on the API, but API docs specify certain places that take page numbers * paginator currently not raising `InvalidPageRequest` even though in docstrings says that it does -------------------------------------------- Future: * Need to add exception handling for a lot of it * Exceptions need to be nice CRITP exceptions * formatted with black * is_node_schema_valid is working correctly! * is_node_schema_valid is working correctly! * formatted with black * tests passing correctly * `test_prepare_host` successful! * `test_get_db_schema_from_api` successful! notes: * save project not passing anymore because I changed the save method within the API, I will have to come check that later * removed `"node": "material"` from all nodes dataclass * Add schema validation to node design * update api save to test with simple_project_node * Use class name for `node` attribute (#74) * use type(self).__name__ for node * add magic to be able to use node_type for classes and instances to get the node type --------- Co-authored-by: nh916 * removing api methods unsupported by API * delete() * get_my_user() * get_my_groups() * get_my_projects() * fixed vocab that was missing return statement * remove wrong node_type * formatted with black * added install.yml again * formatted with trunk * optimized imports and removed unused variable * formatted with trunk * added rough documentation for API * changing variable names to what data model stated * Project: had materia**s** and data model specified material * Experiment: had citation and the data model specified citation**s** * formatted docs/api.md with trunk * changed `check_initial_host_connection` to private * refactoring and renaming * api instance is private `_is_vocab_valid` and `is_node_schema_valid` all tests are passing except 2 1. 
`api.save()` because of deserialization 2. `api.search()` I assume because there are changes happening to the API * refactoring and renaming * api instance is private `_is_vocab_valid` and `is_node_schema_valid` all tests are passing except 2 1. `api.save()` because of deserialization 2. `api.search()` I assume because there are changes happening to the API * updated faq.md with more example code * updated api documentation * formatted examples docstrings within api search * added docstrings to Search Modes * docstrings formatting * added rough draft documentation of paginator * removed schema and vocabulary.py functions * having the schema and vocabulary methods within the global API * formatted api with black * formatted faq.md * formatted with trunk * updated FAQ * added more content * trunk formatted * separated out the Exceptions more work is needed to get these to be readable and easy to use, but it is a good start for now * trunk formatted documentation `.md` files * removing unused documentation file * renaming `get_vocab` to `fetch_vocab` * fix test_node_util * change token management and add dummy token * fix experiment * fix api __enter__ * also make CRIPT_HOST a token * refine node expectation for api check * test api with env variables only (we don't have token and host ready otherwise) * removed test_vocab_and_schema.py * the vocab and schema are no longer needed as the tests are captured within test_api.py * fixed error for api * api tests passing except for save * add auto node validation * fix inventory * prefix process * fix material identifiers * commenting out the `api.save()` test * fixup material * fix author * enough fixing for now * added exception handling to getting the db_schema * renaming `fetch_vocab` to `get_vocab` this way it stays consistent with `get_db` * updated TODO * removing developer documentation from docs navbar * fixed documentation issues for collection.md * fixed docstrings documentation link * fixed documentation warnings for reference and mkdocs.yml * trunk fix * add missing s's * revert type thing * computational forcefield * making some progess on test_nodes_util.py * fix computational forcefield * remove material keyword validation * some initial uuid work * fixed typo * updated collection, and tests passing! * reference tests successful! * project tests passing successfully! * removing extra parameters in `test_create_simple_experiment` * `complex_quantity_node` fixed `uncertainty_type` `uncertainty_type` was incorrect vocabulary "std" is invalid vocabulary * Quantity `uncertainty` must be str * first 2 `computational_process` tests passing * draft uuid base node * inheritance design for uuid and url * intermediate step * optimze search * fix search errror * added `simple_computational_process_node` * move material property to the end to avoid overwriting it * all experiment tests passing! * importing `simple_computational_process_node` fixture * computation tests successful! * data node tests successful! * flattened identifiers successfully! 
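The search work above converged on a single `api.search()` driven by a `SearchModes` enum (node type, contains-name, exact name, UUID) plus a paginator. As a rough sketch only — the enum members and URL shapes below are assumptions, not the SDK's real endpoints — the mode essentially selects how the query is built:

```python
from enum import Enum
from typing import Optional


class SearchModes(Enum):
    # assumed member names, mirroring the modes listed above
    NODE_TYPE = ""
    EXACT_NAME = "exact_name"
    CONTAINS_NAME = "contains_name"
    UUID = "uuid"


def build_search_query(host: str, node_type: str, mode: SearchModes, value: Optional[str] = None, page: int = 0) -> str:
    """Illustrative only: map a search mode to a query URL."""
    if mode == SearchModes.NODE_TYPE:
        return f"{host}/{node_type}/?page={page}"
    if mode == SearchModes.UUID:
        return f"{host}/{node_type}/{value}/"
    return f"{host}/search/{node_type}/?q={value}&mode={mode.value}&page={page}"


print(build_search_query("https://criptapp.org/api/v1", "material", SearchModes.CONTAINS_NAME, "polystyrene"))
```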
* added `simple_computational_forcefield_node` * added `simple_computational_forcefield_node` to import * material first 2 tests passing test_create_simple_material test_all_getters_and_setters * material test for `computation_forcefield` failing * removing process from material.py * removing process from material.py * serializer for material identifier working correctly * primary_nodes: updated simple_inventory * primary_nodes: added `simple_material_dict()` * added simple_material_dict to conftest.py * inventory nodes passing * added fixtures to primary_nodes.py and cleaned * cleaned up fixtures to use other fixtures instead of rewriting the nodes * removed use of copy from fixtures * `simple_computation_process_node` * cleaned up `complex_material_node` to use fixtures * updated subobjects fixtures * `complex_quantity_node` has `uncertainty` as string instead of float because db schema demands that * changed `complex_ingredient_node` `keyword` to array instead of a single word as db schema wants it * created `simple_condition_node` * added to conftest.py * renamed variables in test_property.py * fixed `test_serialize_computational_process_to_json` * updating util.py to work with serializer * changing node_dict to load from JSON * added `_is_node_field_valid` function * added comments and docstrings to core.py * made process.py attributes singular * updated test_process.py * added helpful TODO * added helpful TODO * move uuid base to own module * trunk fmt * trunk fmt * revert json.loads instead of dict * fix minor * fixed material up again * implemented bad deserialization for material.identifiers. * material passes now, but it is definitely not compatible. There is something fishy here. * break tests even more, by not silently ignoring wrongly set attributes * material passes test now * fix keyword in process * revert property_ * fix inventory * enable json schema test before loading nodes * fix unused imports for user * fix material only gets identifiers * allow only key and unit update together for quantities * remove json validate before loading * fix quantity dict * add breaking parent material again * remove api db length check * remove leftover prints * fix property * reshuffle API host access * removed public access to api modifiers * improve error message on attempted attribute setting * add functionality to error about extra non-tolerated arguments during node construction. * UUID support for all nodes that need it (#91) * some initial uuid work * draft uuid base node * inheritance design for uuid and url * intermediate step * move uuid base to own module * trunk fmt * reshuffle API host access * removed public access to api modifiers * host idea (#92) * host idea * update * update --------- Co-authored-by: nh916 * fix some smaller issues * remove prepare host test * eliminate extra process argument * fix issues with collection fixture * enable test failure again * removed duplicate `_http_headers` class variable I think there was accidentally a duplicate class variable `_http_headers` and I removed one of them * parameters work now * fix quantity.uncertainty * removing identifier in `__init__` as it doesn't exist seems like identifier subobject was imported into `subobjects/__init__.py` before.
However, since we deleted `identifier.py` we forgot to delete it from `subobjects/__init__.py` and it gives an error when code is ran * removing identifier in all `__init__` as it doesn't exist seems like identifier subobject was imported into all `__init__.py` in the past However, since we deleted `identifier.py` we forgot to delete it from `subobjects/__init__.py` and it gives an error when code is ran. Deleting it from all the imports now * fix parameter test * spelling of algorithms * first draft condense edges to uuid * remove group node * minor mods to computationProcess * fix more issues * fixing lots of data plural errors * fix more * fix equipment * fix property * temp * fix experiment * fix how condensing worked * fix util tests * get material identifiers dynamically (#96) * created `get_vocabulary_by_category` api method * wrote the api method for `get_vocabulary_by_category` * wrote tests for `get_vocabulary_by_category` * using `get_vocabulary_by_category` within material identifiers * optimizing `get_vocab_by_category()` to use cache * updated `_get_vocab()` docstrings * removed TODO `_get_vocab()` comment * updated the `_get_vocab()` method * `get_vocab_by_category()` caching everything inside of `_vocabulary` * `_get_vocab()` using `get_vocab_by_category()` * using enums for `ControlledVocabularyCategories` * refactoring to use enums * got all api tests to pass * removing InvalidVocabCategoryError because vocab categories can no longer be invalid because they come from enum * completed TODO for `_get_vocab` * removed `InvalidVocabularyCategory` exception and from all files using it * updated the tests * switched some returns Union[bool, exception] because it was only returning the boo and raising the exception * completed some TODO and removed them * reformatted with black * material tests are passing upgraded serializer and deserializer to use dynamic material identifiers from the API controlled vocabulary * using list comprehension to compact the code a bit * added docstrings to `_from_json()` * updated docstrings to `_from_json()` * upgraded `test_get_vocabulary_by_category()` * moved `_deserialize_flattened_material_identifiers` into its own file * updated comment * updated comment * fix util mess * fix utils more * updated outdated error * rename exception * Feature: API config.json (#102) * added config option to API class * added config file option to api * updated to pass all tests for config file * formatted with trunk * updated api __init__ config docstrings changed docstring for config_file_path from path object and str to just str to reflect the code * added trunk and tests to run on all branches updated github workflow * ultra condense material edges * make the ingredient test worse to get it passing * fix merge chaos * make complicated function more readable * Feature: Subobjects Documentation (#104) * added docs for subobjects * updated documentation for algorithm subobject * Create computation_process.md * added docstrings to citation subobject * fixed circular import * added docstrings documentation to computational_forcefield subobject * added documentation to Condition subobject * added docstring documentation and type hinting to equipment subobject * added ingredient subobject documentation * added CRIPT data model page link to the ingredient documentation * wrote first draft of parameter subobject docs * updated ingredient `Available Subobjects` to h2 * first draft of the property subobject documentation * first draft of Quantity subobject documentation 
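To keep the controlled-vocabulary work above fast, categories are fetched lazily and cached in `_vocabulary`, so repeat calls to `get_vocab_by_category()` never hit the API twice. A minimal sketch of that caching pattern, with made-up category names and a stand-in fetcher instead of a real HTTP call:

```python
from enum import Enum
from typing import Any, Callable, Dict, List


class VocabCategories(Enum):
    # a couple of assumed category names, for illustration only
    MATERIAL_IDENTIFIER_KEY = "material_identifier_key"
    PROCESS_TYPE = "process_type"


class VocabCache:
    """Illustrative lazy cache: each category is fetched once, then reused."""

    def __init__(self, fetch: Callable[[str], List[Any]]):
        self._fetch = fetch                       # callable(category_slug) -> vocab entries
        self._vocabulary: Dict[str, List[Any]] = {}

    def get_vocab_by_category(self, category: VocabCategories) -> List[Any]:
        if category.value not in self._vocabulary:
            self._vocabulary[category.value] = self._fetch(category.value)
        return self._vocabulary[category.value]


# usage with a stand-in fetcher instead of a real API request
cache = VocabCache(fetch=lambda slug: [f"{slug}_entry"])
cache.get_vocab_by_category(VocabCategories.PROCESS_TYPE)   # fetches
cache.get_vocab_by_category(VocabCategories.PROCESS_TYPE)   # served from the cache
```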
* first draft of `software_configuration` documentation * `Software` node documentation first draft * updated ingredient subobject to have return statements * formatted with trunk * fixed `Property` sub-object * updated spelling from `subobject` to `sub-object` * changing docs to run on `validate-nodes` branch docs can be changed to run on develop and main later * Update docs.yaml to run on `add_docs` * allowing docs workflow to run manually too * fixed spacing issue and italics issue * synced up `Attributes table` and Python class * updated code examples for sub-objects * formatted with trunk * trunk format * added documentation for vocabulary categories (#105) --------- Co-authored-by: Ludwig Schneider * added placeholder for Crash Course Docs * Cspell (#108) * add cspell checkint * add cspell config * fix all spelling * Document node exceptions (#110) * added the missing exceptions * documented node exceptions * formatted with trunk * upgraded Exceptions * formatted with black * update raising of `CRIPTNodeSchemaError` * caught and fixed spelling mistakes with `cspell` * docs runs on `main` and `validate_nodes` branch * docs runs on `validate_nodes` branch for now * docs runs on `validate_nodes` branch for now * Update primary nodes type hints (#117) * added type hinting for `Computation` getter and setter * updated type hinting for `Data` getter and setter * updated type hinting for `Computation_Process` getter and setter * removed commented out import statement * making the file lighter * can be added when needed * updated `Reference` type hints * removed commented out import statement * removed commented import statement * removed unneeded commented out import in process.py * added complex material test (#125) * added documentation for cript.utils (#89) * added documentation for cript.utils * changed documentation to allow all utils * trunk fmt --------- Co-authored-by: Ludwig Schneider * add test to repeat citation * mend * fix user node * removing `Group` and `Identifiers` because they are not supported for now * formatted mkdocs.yml with trunk * fix issue * fix project * fix process * disable uid test, since incompatible so far. * make the last missing test pass * remove help --------- Co-authored-by: nh916 * setup merge-q actions (#145) * remove depency test (#1) (#149) # Description ## Changes ## Tests ## Known Issues ## Notes ## Checklist - [ ] My name is on the list of contributors (`CONTRIBUTORS.md`) in the pull request source branch. - [ ] I have updated the documentation to reflect my changes. * add member and admin to collection (#146) * UUID base all (#150) # Description According to @brili all nodes (including subobjects) have a UUID, and a few other attributes. So, I added those attributes to the UUIDBaseNode and have all nodes inherit from it. ## Changes ## Tests ## Known Issues This might be better done with refactoring, and considering how BaseNode and UUIDBaseNode now serve a similar function. ## Notes ## Checklist - [x] My name is on the list of contributors (`CONTRIBUTORS.md`) in the pull request source branch. - [ ] I have updated the documentation to reflect my changes. Documentation isn't up to date with this. * Create CODE_OF_CONDUCT.md (#124) # Description Created a basic Code of Conduct for the repository aided by GitHub to establish guidelines for contributors. The Code of Conduct promotes inclusivity, respectful communication, and prohibits harassment and discrimination. 
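Since every node and sub-object now carries a UUID (see the UUIDBaseNode change in #150 above), the inheritance can be sketched roughly like this — the attribute names and URL format are assumptions, not the SDK's exact implementation:

```python
from dataclasses import dataclass, field
from uuid import uuid4


@dataclass
class UUIDBaseNode:
    """Illustrative base: every node and sub-object gets a UUID."""
    uuid: str = field(default_factory=lambda: str(uuid4()))

    @property
    def url(self) -> str:
        # hypothetical host; the real SDK derives this from the API host
        return f"https://criptapp.org/api/v1/{self.uuid}"


@dataclass
class Parameter(UUIDBaseNode):
    """Even sub-objects inherit the UUID attributes."""
    key: str = ""
    value: float = 0.0


p = Parameter(key="update_frequency", value=1000.0)
print(p.uuid, p.url)
```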
* add process back into material (#148) # Description Process is again part of the material node. ## Changes I think it used to be an array, now it is a single value. ## Tests Testing this like usual. ## Known Issues None ## Notes ## Checklist - [x] My name is on the list of contributors (`CONTRIBUTORS.md`) in the pull request source branch. - [ ] I have updated the documentation to reflect my changes. Not sure about documentation, the old one with the process was still in. * removing downloading of db schema (#157) # Description I think since @InnocentBug did an amazing job integrating the SDK and API, downloading and saving the db schema on every save will no longer be needed and we can remove it. However, if we need it still I am happy to add it to `.gitignore` and keep it ## Changes * removing downloading of db schema from `conftest.py` ## Tests ## Known Issues ## Notes ## Checklist - [x] My name is on the list of contributors (`CONTRIBUTORS.md`) in the pull request source branch. - [x] I have updated the documentation to reflect my changes. * file node inheriting from PrimaryBaseNode.py (#158) * file.py inheriting from PrimaryBaseNode.py * formatted and fixed unused import in file.py * fixed `computational_process` JSON test with file since the file node changed to include all base attributes the JSON dictionaries have to change as well to include the correct dictionary fields * fixed test data `test_serialize_data_to_json` with file since the file node changed to include all base attributes the JSON dictionaries have to change as well to include the correct dictionary fields * fixed test data `test_data_getters_and_setters` with file node since the file node changed to include all base attributes the node instantiation needs the name field within the constructor as well * fixed test experiment `test_experiment_json` with file node since the file node changed to include all base attributes the expected dictionary fields must change as well!
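Because the file node now inherits from `PrimaryBaseNode`, its serialized form picks up the base attributes, which is why the JSON test fixtures above had to change. A hypothetical, much-simplified illustration (not the real classes):

```python
from dataclasses import asdict, dataclass


@dataclass
class PrimaryBaseNode:
    """Stand-in for the SDK's primary base node; the attribute set is an assumption."""
    name: str = ""
    notes: str = ""


@dataclass
class File(PrimaryBaseNode):
    source: str = ""
    type: str = ""
    extension: str = ""

    @property
    def node(self) -> str:
        return type(self).__name__   # echoes the `node` attribute trick mentioned earlier


my_file = File(name="my file", source="https://example.com/data.csv", type="data", extension=".csv")
# the serialized dict now carries the base attributes too, which is why the JSON tests changed
print({**asdict(my_file), "node": [my_file.node]})
```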
* formatted test_experiment.py with black * removed unused fixture/test * removed unused import * added documentation to the constructor of file node * Create `mypy CI` & `Fix mypy typing` errors & add `@beartype` (#151) * fixed typings for collection passing mypy * fixed computation.py typings with mypy * ignoring list copy typing error * in case the list is None the SDk would give an attribute error and it would be no fault of the user * fixed computation_process.py typings with mypy * fixed data.py typings with mypy * fixed experiment.py typings with mypy * fixed experiment.py typings with mypy ignored `identifiers: List[dict[str, str]]` typing getting an error of: ``` error: Argument "default_factory" to "field" has incompatible type "Type[Dict[Any, Any]]"; expected "Callable[[], List[Dict[str, str]]]" [arg-type] ``` * fixed process.py typings with mypy * fixed project.py typings with mypy * fixed reference.py typings with mypy * fixed algorithm.py typings with mypy * fixed citation.py typings with mypy * fixed computational_forcefield.py typings with mypy * fixed ingredient.py typings with mypy * fixed parameter.py typings with mypy * not sure how to fix quantity.py typings * updated paginator.py typings * updated api.py typings * formatting with trunk * adding `@beartype` to api.py * adding `@beartype` to paginator.py * adding `@beartype` to collection.py * adding `@beartype` to reference.py.py * adding `@beartype` to project.py.py * adding `@beartype` to process.py * adding `@beartype` to primary_base_node.py * adding `@beartype` to material.py * adding `@beartype` to inventory.py * adding `@beartype` to experiment.py * adding `@beartype` to data.py * adding `@beartype` to computation_process.py * adding `@beartype` to software_configuration.py * adding `@beartype` to software.py * adding `@beartype` to quantity.py * adding `@beartype` to property.py * adding `@beartype` to parameter.py * adding `@beartype` to ingredient.py * adding `@beartype` to equipment.py * adding `@beartype` to condition.py * adding `@beartype` to computational_forcefield.py * adding `@beartype` to citation.py * adding `@beartype` to user.py * adding `@beartype` to file.py * Add beartype check (#155) * adding `@beartype` to api.py * adding `@beartype` to paginator.py * adding `@beartype` to collection.py * adding `@beartype` to reference.py.py * adding `@beartype` to project.py.py * adding `@beartype` to process.py * adding `@beartype` to primary_base_node.py * adding `@beartype` to material.py * adding `@beartype` to inventory.py * adding `@beartype` to experiment.py * adding `@beartype` to data.py * adding `@beartype` to computation_process.py * adding `@beartype` to software_configuration.py * adding `@beartype` to software.py * adding `@beartype` to quantity.py * adding `@beartype` to property.py * adding `@beartype` to parameter.py * adding `@beartype` to ingredient.py * adding `@beartype` to equipment.py * adding `@beartype` to condition.py * adding `@beartype` to computational_forcefield.py * adding `@beartype` to citation.py * adding `@beartype` to user.py * adding `@beartype` to file.py * added "beartype" to `.cspell.json` * added beartype to requirements_dev.txt * trunk format * add beartype requirement --------- Co-authored-by: Ludwig Schneider * remove unneeded file * several small mypy issues * Create mypy_check.yaml * 2 more fixed * made the package versions exact * put `beartype` into requirements.txt and removed it from requirements_dev.txt because it is already included in requirements.txt * 
updated coverage because a patch was available * formatted with isort * formatted with isort and removed unused imports * fixed dict typing and added `process` that was missing before * fixed dict typing for material.py * telling mypy to ignore some types * ignoring typing error for getters because within setters and constructor we require the correct value, however, in case it would be None, it would break everything. In the future we should try to figure out if we can fix this so the typings always work without us having to ignore them * fixed api mypy typing errors * formatted with trunk * fixing imports with isort * fix overshadow * ignore this type * fix ingredient keyword * fix material compF and parent * make collection pass * fix condition * fix material test * fix property * check for none * fix project test * fix more subobjects * add half the UID deserialization support (random order is missing) * make mypy trunk required * reduce test number * trunk merge q * changing the package to exact version --------- Co-authored-by: Ludwig Schneider * Feature: `API.upload_file()` & `API.download_file()` (#156) # Description added AWS S3 file upload and file download to API class ## Changes * brought all AWS S3 variables to class level * file upload and download working * during file upload the file name is changed to `file_name_uuid4_hex.extension` * using UUID4 hex to not have dashes in the file name and have it be a bit cleaner * `uuid.uuid4().hex` * wrote a test for both file upload and download * changing the `_BUCKET_DIRECTORY_NAME` of `cript.API` within `conftest.py` to be sure tests only go into `tests/` directory of the AWS S3 storage * `test_upload_and_download_file` was originally written in a complex way, but using some python functions I was able to simplify it * Storing the cloud storage object_name inside of the file node source attribute * e.g. `user_data/7244ed91cafa430aacf079a13ec7cc5e.txt` ## Tests * `test_upload_and_download_file()` passes locally with a token, but the test is commented out to pass all tests on the GitHub CI * The test essentially uploads a temporary file from a temporary directory and then downloads it to be sure the contents are the exact same. ## Known Issues * testing file upload to our AWS S3 bucket is problematic because every time we run this test we are uploading a new file to AWS S3, this could increase our storage costs * I have already run it a bunch of times during testing and development * Good news is that all test files are inside of the `tests/` directory, are all `.txt` files with clear text that says that it is an automated test, so if we want to periodically clear out that directory it should be okay, but this creates more maintenance work * the UX feels a bit weird to me because the user would have to pass in the directory, file name, and extension to the function, which might be less intuitive for the user, or the user might not know the file extension * further error handling for both upload and download might be needed * Boto errors are not intuitive and harder for the user to understand * The biggest issue with the current implementation that I see is that the AWS S3 bucket sends back bytes and does not tell me the file extension.
* We could try to find the file extension from the file node `extension` attribute and add it onto there * However, it is not guaranteed that the user will always give a file extension * ignoring all boto types because I do not know how to fix them and mypy is throwing errors ## Notes * I was thinking of encapsulating the `_s3_client` client in the `upload_file` method because it is only used there, but all other variables for s3 are already at class level because I think this could make changing the S3 settings easier for maintenance. Switching from class level to method would be very easy and I can do it if we think that is a better way to go. ### Thoughts [I think file upload and download should be at API level](https://trello.com/c/SHsl4plZ) I think a workflow like this might be easier: 1. We could create an API endpoint like `https://mycriptapp.org/files/upload`. 2. Then, we could send an HTTP POST request to that URL with the file. 3. The API would store the file wherever it wants and respond back with the URL of the stored file. 4. The file URL from the API can then be used as a file node source attribute to generate a giant JSON with only web file sources and no local file sources anymore. This approach offers several advantages: - The API has control over the files, ensuring consistent conventions. - It promotes uniformity, so the frontend doesn't save files like `_file_123456_789012.pdf`, and the SDK doesn't save it like `123456_file_name_789012_.pdf`. - It allows for easy switching of storage providers (e.g., from AWS to GCP) without impacting clients interacting with the API. I think this might be a good idea as it simplifies the workflow, promotes consistency, and allows for flexibility in the future. * optimize GitHub CI (#164) * Update test_coverage.yaml * Update mypy_check.yaml to use only python 3.11 * renamed workflow to simplify and added comments * Feature: automatic uploads for File nodes with local file source and `cript.File.Download` (#161) * wrote upload and download method within file node * added test for file node upload and download, but not tested yet * file can upload and download correctly * changed `self._s3_client.upload_file` to named arguments named arguments are easier to understand and work with especially with the boto package * changed conftest.py to use the correct cloud bucket directory name meant for tests * commenting out `API.file_upload` and `API.file_download` * formatted api.py and file.py with black * file passing mypy and upload and download test * commenting out S3 client to pass tests on GitHub CI * commenting out file node upload local file to pass tests on GitHub CI * optimized imports and removed unneeded imports putting test specific import at test level, that is commented out and cannot run on GitHub CI * commented out `test_create_file_local_source()` * Update api.py (#166) * Update api.py changing `_BUCKET_DIRECTORY_NAME` for Python SDK * Update CONTRIBUTORS.md adding @fatjon95 to contributors * Update CONTRIBUTORS.md removing extra spaces * Update simple-issue.md template (#159) * Update simple-issue.md * fix issue template --------- Co-authored-by: Ludwig Schneider * removed `crash_course.md` file because we have quick start example instead * swapping the order for navigation with exception and utility functions * wrote cript_installation_guide.md copied over cript_installation_guide.md from the last SDK where I wrote it Not sure if this is fully needed or not * changed navigation `API` section to `API Client` * created
`how_to_get_api_token.md` copied over `how_to_get_api_token.md` from the last tutorial where I wrote it. The images and text do not line up yet and need to be updated * added image border class styles to use within my documentation * updated how_to_get_api_token.md commented out pictures because they are not currently applicable and look really bad the rest of the UI I think looks okay for now * updated .cspell.json * fix styles * update synthesis example from old SDK (#163) * update synthesis example from old SDK * put example script into documentation and updated it put example/ script directory inside of the docs/ directory to make it appear within the documentation updated documentation a bit to have links to nodes and any improvements that I could find * final touches to the example * updated admonitions formatting * fix trunk errors * fixed mkdocs formatting * fixed markdown formatting * disable trunk prettier for documentation to not mess it up. Maybe we can add it later again --------- Co-authored-by: nh916 * fixed markdown formatting (#171) * added abstract at the top to easily tell the audience what the documentation is about * reordered text to make it easier to read through * Add type hints to `cript.API.upload_file()` (#173) * added type hints to `cript.API.upload_file()` * fixed grammar of comment * all types are correct and added comment * Remove identifier and keyword validation functions from material.py (#170) * started on integration testing, but needs more work * removing unused material keyword and identifier validation check since we are using the db schema for it * removing `_validate_keyword` from constructor and keyword setter * removing integration test from material tests integration test was accidentally placed here when switching between branches * remove integration test fully from test_material.py * postpone file update until project save (#175) * postpone file update until project save * add runtime comments into * rename internal s3_client * Allow the user to specify api that is used to upload files. 
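The upload naming scheme described in #156 above uses `uuid.uuid4().hex` so the stored object name has no dashes, and that object name is what ends up in the File node's `source` attribute. A small sketch of the idea (the exact pattern and directory name are assumptions):

```python
from pathlib import Path
from uuid import uuid4


def object_name_for_upload(local_path: str, bucket_directory: str = "user_data") -> str:
    """Illustrative naming scheme: uuid4().hex keeps the key free of dashes.

    Whether the original file name is kept in the key is an assumption here.
    """
    extension = Path(local_path).suffix              # ".txt", ".csv", ...
    return f"{bucket_directory}/{uuid4().hex}{extension}"


# e.g. "user_data/7244ed91cafa430aacf079a13ec7cc5e.txt", which is what gets stored
# in the File node's source attribute after upload
print(object_name_for_upload("results/my data.txt"))
```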
* spell check * implement Navids changes * fix mypy error * Update Documentation FAQ.md (#188) * Update faq.md * added FAQ for security issues * updated faq * add simulation example notebook (#174) * add simulation example notebook * address navids comments * added the example code walkthroughes to the navigation menu (#191) --------- Co-authored-by: nh916 * adding warning for integration tests for API * added JSON representation to all nodes in documentation (#192) * added collection JSON representation to collection.py * added collection JSON representation to computation.py * added collection JSON representation to computation_process.py * added collection JSON representation to data.py * added collection JSON representation to experiment.py * added collection JSON representation to inventory.py * added collection JSON representation to material.py * added JSON representation to process.py and fixed the formatting of the table * added JSON representation to project.py * added JSON representation to reference.py * added JSON representation to software_configuration.py * added JSON representation to equipment.py * added JSON representation to ingredient.py * added JSON representation to quantity.py * added JSON representation to parameter.py * added JSON representation to property.py * updated computation_process.py * added JSON representation to software.py * updated computation_process.py `JSON Representation` * updated project.py `JSON Representation` * updated equipment.py `JSON Representation` * updated equipment.py `JSON Representation` * updated material JSON with polystyrene bigsmiles instead of invalid * fixed broken link in data.py and formatted the table * updated material json from smiles to bigsmiles * All Nodes Create Integration Tests, and changes to make it work (#168) * started on integration testing, but needs more work * cript.API.search removed typing for node_type for now beartype kept complaining that node project is not of type BaseNode, so I removed the typing for now for easy testing and will add it after and debug it * test_material.py wrote integration test, but currently has issues passing * adding a * posting to DB and getting it works, but deserialization doesn't * posting to DB and getting it works, but deserialization doesn't * removed unneeded name changes * wrote integration test for Project node * can create a project node * can get a project node * cannot deserialize a project node from API to a project node * cannot assert that they are equal for now * wrote integration test for collection node * can create a project node * can get a project node * cannot deserialize a project node from API to a project node * cannot assert that they are equal for now * wrote integration test for experiment node * can create a project node * can get a project node * cannot deserialize a project node from API to a project node * cannot assert that they are equal for now * wrote integration test for inventory node * can create a project node * can get a project node * cannot deserialize a project node from API to a project node * cannot assert that they are equal for now * massively cleaned up project integration test function using a helper function for integration test functions because there is a lot of common code between all integration tests * massively cleaned up collection integration test function using a helper function for integration test functions because there is a lot of common code between all integration tests * removed unneeded comment * added docstring 
to project integration test * added integration test for inventory inventory integration test is failing with an API error of: `Bad uuid: 27da914c-65f1-4e8f-9797-5633d2fe0608 provided` * renaming project and collection node names for integration tests * refactoring `test_material/test_integration_material()` * wrote integration test for simple process node * created `complex_process_node` fixture * added `complex_process_node` fixture * wrote integration test for process * test_integration_simple_process * runs but cannot deserialize * test_integration_complex_process * takes forever to run and the schema validation comes out wrong * wrote integration test for data node * wrote integration test for computation node * renaming project name for integration test * started on integration test for computation_process wrote the first draft, but getting `CRIPTOrphanedMaterialError` * worked on `test_integration_reference` citation currently missing from the API response of the project * worked on `test_integration_condition` getting orphaned nodes * wrote `test_integration_file` * make user email, orcid optional * deserializing within integration test to node * checking node json vs api json to node to json * patch invalid uids out * made experiment integration test function DRY * fixing `complex_process_node` fixture * was not returning the node from the fixture, but now returning it * fixed type hinting for user getters mypy found errors because we said within the ORCID and user email that we will always return a string but they can now be optional so I updated the getters for `ORCID` and `user email` to `Union[str, None]` * updating `complex_property_node` * inputting named arguments for complex property sub-object * changing notes from `"notes"` to `"my complex_property_node notes"` to easily know that the notes are coming through correctly * renamed variable to make reading it easier * wrote `test_integration_material_property` but getting `CRIPTOrphanedMaterialError` * wrote `test_integration_process_condition` but getting `CRIPTOrphanedMaterialError` * wrote `test_integration_material_ingredient` but getting `CRIPTOrphanedProcessError` * wrote `test_integration_quantity` but getting `CRIPTOrphanedProcessError` * updated `complex_equipment_node` * added docstrings * made it into named arguements during instantiation * renamed the variable from `e` to `my_complex_equipment` * changed description a bit to help identify it if needed in tests * wrote `test_integration_process_equipment` but it is getting `CRIPTNodeSchemaError` * formatted test files and fixture with black * updated `complex_computational_forcefield_node` fixture * changed it to named arguments * changed the variable name * added to description to easily identify it if needed * added minimal docstrings to it * wrote `test_integration_material_computational_forcefield` but getting `CRIPTOrphanedDataError` * updated `complex_software_configuration_node` fixture * changed it to named arguments * changed the variable name * added to notes to easily identify it if needed * added minimal docstrings to it * commented out assertion in `integrate_nodes_helper` doing this for now to check which nodes can even be made correctly and fix whatever internal errors we have first, and then tackle checking the JSON against the API JSON * `test_integration_software_configuration` written correctly just needs to check the JSON against the API and we'll know what to do for sure * updated project name for `test_integration_software_configuration` * 
wrote `test_integration_algorithm` and working correctly right now just needs to make the assertion correctly to compare SDK and API JSONs later * * updated `complex_parameter_node` fixture * changed it to named arguments * added minimal docstrings to it * * updated `complex_algorithm_node` fixture * changed it to named arguments * added minimal docstrings to it * wrote `test_integration_algorithm` working correctly, just needs to have SDK and API JSON checked * wrote `test_integration_parameter` getting `CRIPTJsonDeserializationError` * upgraded `complex_citation_node` fixture * made it into named arguments during instantiation * added minimal docstrings to it * wrote `test_integration_citation` test is mostly working, but just needs to be checked against the API and SDK JSON * changed order of the print statements to make more sense * save * trying compare JSONs for what we sent and recieved * removing `try` `catch` block to handle API duplicate projects errors because the project has a unique name with UUID and can no longer be a duplicate in DB * deepDiff with `exclude_regex_paths` not working for comparison it keeps giving me changes of things that I told it to ignore * deepDiff catching the correct differences telling deepDiff to ignore `uid` field and the rest it only checks what they have in common. It seems to compare the dicts correctly. Also had to convert from JSON to Dict for doing the comparisons * deepDiff catching the correct differences telling deepDiff to ignore `uid` field and the rest it only checks what they have in common. It seems to compare the dicts correctly. Also had to convert from JSON to Dict for doing the comparisons * renaming the integration project for experiment so there is no duplicate error from API * updated docstrings for `integrate_nodes_helper` helper function * fixed `test_integration_computational_process` OrphanedMaterialNode, but having trouble with `CRIPTOrphanedProcessError` * still getting `CRIPTOrphanedProcessError` * process integration test successful! * added comment * removed print statement from test * fixed OrphanedNodeError * added todo * found an issue to fix * adding arguments to complex_condition fixture instantiation * added `simple_condition_node` * wrote `test_integration_process_condition` but getting `CRIPTJsonDeserializationError` * wrote `simple_ingredient_node` fixture * updated keyword for `simple_ingredient_node` fixture * `test_integration_material_ingredient` written but getting `bad UUID API error` * `test_integration_material_ingredient` written but getting `bad UUID API error` * updated docstring for `test_integration_ingredient` * wrote `test_integration_quantity` * fixed `simple_software_configuration` fixture put it in fixtures/subobjects removed it from fixtures/primary_nodes * `test_integration_software_configuration` successful! * adding `simple_software_configuration` fixture * adding `simple_software_configuration` fixture to conftest.py * `test_integration_algorithm` successful! * added description to `simple_equipment_node` fixture * `test_integration_equipment` successful! * `test_integration_parameter` hitting deserialization error * moved around the print statements a bit to make it easier to debug * `test_integration_material_property` successful! * `test_integration_computational_forcefield` successful! 
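The SDK-vs-API JSON comparison above ended up using deepDiff on dicts with `exclude_regex_paths` so that volatile fields like `uid` are ignored. Roughly like this (the regex and sample payloads are illustrative; requires the third-party `deepdiff` package):

```python
import json

from deepdiff import DeepDiff   # third-party package used by the integration tests

local_json = '{"node": ["Project"], "name": "my project", "uid": "_:abc123"}'
api_json = '{"node": ["Project"], "name": "my project", "uid": "0x1f"}'

# compare dicts (not raw JSON strings) and ignore the volatile `uid` fields,
# roughly as described above; the regex is an assumption about the key layout
diff = DeepDiff(
    json.loads(local_json),
    json.loads(api_json),
    exclude_regex_paths=[r"\['uid'\]"],
)
assert not diff, f"SDK and API JSON differ: {diff}"
```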
* wrote `simplest_computational_process_node` fixture * updated `test_integration_computational_process` * removed print statement from `test_integration_process_condition` * fixed `equipment/test_json` * fixed `test_property/test_json` * fixed `test_software_configuration/test_json` * switching order of print statement for debugging purposes * updated `test_computational_forcefield` and is passing * fix condition integration error: the backend was sending str values instead of numbers * added comment * wrote up design for save_helper.py for `Bad UUID` errors * fix parameter.value type issue with temporary fix * designed brute_force_save * broke save into save and send post request still needs work * put `get_bad_uuid_from_error_message` into a helper function this will make it easier to reuse code and update it when needed * wrote the loop for `brute_force_save` * Bad UUID handling (#186) * differentiate post and patch * add recursive calling for known uuid * minor tweaks * add comments * fix save recur. * test_inventory works * fix uid from back end less destructive * fix spelling mistakes * fix mypy issue (by ignoring them) * fix ingredient material bad API response. * add a node cache to the UUIDBaseNode. This node cache is used to upda… (#189) * add a node cache to the UUIDBaseNode. This node cache is used to update existing UUID nodes, rather than creating a new node with the same UUID. * fix spelling * install requirements dev for tests * add an assert that makes sure to not instantiate a node twice with the same UUID * remove uuid uniqueness assertion again --------- Co-authored-by: nh916 * wrote `test_integration_software` for test_software.py successfully! * wrote host and token placeholder within conftest.py * removed unused variable * fix cspell * Refactor the save a little bit. Patch does not work.
(#190) * fix import * changed software_configuration.py algorithm type in constructor to be `Optional` * commenting out integration tests to make them pass for now commenting out tests that would fail for now because otherwise it would always fail because it doesn't have a way to connect to any backend * formatted with trunk software_configuration.py * commenting out unused import json and deepdiff are unused since the function body was commented out to get it to run with the backend because otherwise it would always fail for the CI * adding warning for integration tests * making the warnings easier to track down * renamed integration test the new name makes more sense * renamed integration test the new name makes more sense --------- Co-authored-by: Ludwig Schneider * Updated README.md and added CI badges (#141) * added CI badges to README.md * updated `contribution` section within README.md * added badges for dependencies we are using * added badge for dependency: pytest-cov * added badge for dependency: Pytest-doctest * added badge for dependency: Coverage * GitHub workflows CI/CD * Hosting: GitHub Pages * Update README.md fixed coverage.py logo * added release notes section to README.md * reduced the amount of badges to only essentials * updated "invite contributions section wording" * fix trunk * added latest release badge to README.md * updated latest release badge * removed latest release badge * removed dependency-review merge queue is creating issues with dependency review CI --------- Co-authored-by: Ludwig Schneider * removed unneeded tests from test_api.py (#198) * removed `test_api_update_material` & `test_api_delete_material` tests from tests/api/test_api.py wrote these tests earlier, but I don't think they'll be needed * removed save test from test_api.py integration tests for all nodes do this better than this test in the test_api.py the original idea was to have a basic save to know that the save works, but I don't think it is needed after writing the integration tests * removed comment from test_api.py * updated _is_local_file to handle `URL`, `AWS S3 object_name`, `absolute file path`, `relative file path` (#200) * updated _is_local_file to handle file source URL links and S3 object_name created a good test for it with good test cases * updated file name and docstrings for it * updated docstrings to make test clearer * fix cspell config --------- Co-authored-by: Ludwig Schneider * Feature: Download `file.source` that is URL (#196) * first draft of download_web_file * putting runtime type checker on `api.download_file` to catch any possible bad input from the user * wrote test and it is successful! 
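`_is_local_file` (#200) has to tell apart a web URL, an AWS S3 object_name, and an absolute or relative local path so that upload and download can branch correctly. A heuristic sketch of that classification — the real implementation differs; this is just the shape of the check:

```python
from pathlib import Path
from urllib.parse import urlparse


def is_local_file(file_source: str) -> bool:
    """Rough heuristic: web URLs and cloud object names are not local files."""
    parsed = urlparse(file_source)
    if parsed.scheme in ("http", "https"):
        return False
    # anything that does not resolve to an existing path is treated as a cloud object_name
    return Path(file_source).expanduser().exists()


print(is_local_file("https://criptapp.org/file.pdf"))     # False: web URL
print(is_local_file("tests/7244ed91cafa430a.txt"))         # False unless that relative path exists
print(is_local_file(str(Path(__file__))))                  # True: this script itself
```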
* formatted with trunk * formatted with black * updated docstrings * fixed mypy error * optimized imports * updated * updated * upgrade documentation (converted HTML links to MD) (#203) * upgraded simulation.md converted html links to MD links * converted HTML links that open in a new tab to MD links that open in the same tab * MD links are better because: * more consistent with the rest of the documentation * make more sense to have in md file * MkDocs can catch broken MD links, but not broken HTML Links, so it gives us a layer of protection * there were some places that there was opportunities to have links and I made the phrases into links * fix spelling error * updated/upgraded requirements_docs.txt * converted html link to md links * added screenshot picture to how_to_get_api_token.md * added screenshot picture to how_to_get_api_token.md * upgraded index.md * changed HTML links to MD links * added CRIPT Scripts as a resource * added resources to CRIPT index.md * put resources inside of collapsible * updated index.md * formatted with trunk * fixed documentation screenshot of API token (#204) * Feature: getting token from frontend. `API Client class` has `API Token` and `Storage Token` (#202) * frontend implemented feature to get token from UI updated code to reflect using of the new token * renamed `token` to `http_token` * update * update * updated getting token from config file * change `file_upload_token` into `storage_token` * refactored into using `http_token` and `storage_token` * updated test_api.py config file option to work correctly * updated conftest.py with new host, http_token, storage_token * updated documentation with new token * added `storage_token` to docstrings of api __init__ * updated test_api.py to include `storage_token` * updated tests for CI/CD * updated `test_upload_and_download_local_file` * commented out `test_upload_and_download_local_file` for CI * formatted with black * ignoring type errors * updated conftest.py * formatted with black * wrote `test_api_cript_env_vars` for test_api.py * verified environment variables host and token with test * skipping tests correctly * skipping tests with pytest skip tests and giving a good reason for each one * removing unneeded `cript_api` argument for tests that don't need it * uncommented the tests and the imports at the top * optimized imports as well * fixed broken test `test_api_with_invalid_host` * updated * formatted with black * renamed `http_token` to `api_token` * renamed `http_token` to `api_token` within tests all tests passing successfully * updated conftest.py * updated synthesis.md with the new API tokens made it into named arguments * updated synthesis.md to work with new storage token * updated simulation.md to work with new storage token * annotating with pytest.skip to skip the tests that need API * formatted with black * updated test_api.py * undoing changes to test_integration.py the skip did not work on it because it is only a function and the pytest skip decorator needs to go on all the actual tests for it to skip correctly * formatted with black * renamed the parameter for file download because it can be both AWS S3 object_name or file URL * refactored `cript.File.download()` to get the file name from the node itself * `cript.File.download()` gets the file name from the node itself instead of asking for it in the method * the file extension str is manipulated to be uniform and work correctly regardless of how it was inputted * updated documentation for `cript.API.download_file()` (#205) wrote 
documentation for `cript.API.download_file()` for S3 & URL file sources * Handle patch error messages with SDK (#193) * started on integration testing, but needs more work * cript.API.search removed typing for node_type for now beartype kept complaining that node project is not of type BaseNode, so I removed the typing for now for easy testing and will add it after and debug it * test_material.py wrote integration test, but currently has issues passing * adding a * posting to DB and getting it works, but deserialization doesn't * posting to DB and getting it works, but deserialization doesn't * removed unneeded name changes * wrote integration test for Project node * can create a project node * can get a project node * cannot deserialize a project node from API to a project node * cannot assert that they are equal for now * wrote integration test for collection node * can create a project node * can get a project node * cannot deserialize a project node from API to a project node * cannot assert that they are equal for now * wrote integration test for experiment node * can create a project node * can get a project node * cannot deserialize a project node from API to a project node * cannot assert that they are equal for now * wrote integration test for inventory node * can create a project node * can get a project node * cannot deserialize a project node from API to a project node * cannot assert that they are equal for now * massively cleaned up project integration test function using a helper function for integration test functions because there is a lot of common code between all integration tests * massively cleaned up collection integration test function using a helper function for integration test functions because there is a lot of common code between all integration tests * removed unneeded comment * added docstring to project integration test * added integration test for inventory inventory integration test is failing with an API error of: `Bad uuid: 27da914c-65f1-4e8f-9797-5633d2fe0608 provided` * renaming project and collection node names for integration tests * refactoring `test_material/test_integration_material()` * wrote integration test for simple process node * created `complex_process_node` fixture * added `complex_process_node` fixture * wrote integration test for process * test_integration_simple_process * runs but cannot deserialize * test_integration_complex_process * takes forever to run and the schema validation comes out wrong * wrote integration test for data node * wrote integration test for computation node * renaming project name for integration test * started on integration test for computation_process wrote the first draft, but getting `CRIPTOrphanedMaterialError` * worked on `test_integration_reference` citation currently missing from the API response of the project * worked on `test_integration_condition` getting orphaned nodes * wrote `test_integration_file` * make user email, orcid optional * deserializing within integration test to node * checking node json vs api json to node to json * patch invalid uids out * made experiment integration test function DRY * fixing `complex_process_node` fixture * was not returning the node from the fixture, but now returning it * fixed type hinting for user getters mypy found errors because we said within the ORCID and user email that we will always return a string but they can now be optional so I updated the getters for `ORCID` and `user email` to `Union[str, None]` * updating `complex_property_node` * inputting named 
arguments for complex property sub-object * changing notes from `"notes"` to `"my complex_property_node notes"` to easily know that the notes are coming through correctly * renamed variable to make reading it easier * wrote `test_integration_material_property` but getting `CRIPTOrphanedMaterialError` * wrote `test_integration_process_condition` but getting `CRIPTOrphanedMaterialError` * wrote `test_integration_material_ingredient` but getting `CRIPTOrphanedProcessError` * wrote `test_integration_quantity` but getting `CRIPTOrphanedProcessError` * updated `complex_equipment_node` * added docstrings * made it into named arguements during instantiation * renamed the variable from `e` to `my_complex_equipment` * changed description a bit to help identify it if needed in tests * wrote `test_integration_process_equipment` but it is getting `CRIPTNodeSchemaError` * formatted test files and fixture with black * updated `complex_computational_forcefield_node` fixture * changed it to named arguments * changed the variable name * added to description to easily identify it if needed * added minimal docstrings to it * wrote `test_integration_material_computational_forcefield` but getting `CRIPTOrphanedDataError` * updated `complex_software_configuration_node` fixture * changed it to named arguments * changed the variable name * added to notes to easily identify it if needed * added minimal docstrings to it * commented out assertion in `integrate_nodes_helper` doing this for now to check which nodes can even be made correctly and fix whatever internal errors we have first, and then tackle checking the JSON against the API JSON * `test_integration_software_configuration` written correctly just needs to check the JSON against the API and we'll know what to do for sure * updated project name for `test_integration_software_configuration` * wrote `test_integration_algorithm` and working correctly right now just needs to make the assertion correctly to compare SDK and API JSONs later * * updated `complex_parameter_node` fixture * changed it to named arguments * added minimal docstrings to it * * updated `complex_algorithm_node` fixture * changed it to named arguments * added minimal docstrings to it * wrote `test_integration_algorithm` working correctly, just needs to have SDK and API JSON checked * wrote `test_integration_parameter` getting `CRIPTJsonDeserializationError` * upgraded `complex_citation_node` fixture * made it into named arguments during instantiation * added minimal docstrings to it * wrote `test_integration_citation` test is mostly working, but just needs to be checked against the API and SDK JSON * changed order of the print statements to make more sense * save * trying compare JSONs for what we sent and recieved * removing `try` `catch` block to handle API duplicate projects errors because the project has a unique name with UUID and can no longer be a duplicate in DB * deepDiff with `exclude_regex_paths` not working for comparison it keeps giving me changes of things that I told it to ignore * deepDiff catching the correct differences telling deepDiff to ignore `uid` field and the rest it only checks what they have in common. It seems to compare the dicts correctly. Also had to convert from JSON to Dict for doing the comparisons * deepDiff catching the correct differences telling deepDiff to ignore `uid` field and the rest it only checks what they have in common. It seems to compare the dicts correctly. 
Also had to convert from JSON to Dict for doing the comparisons * renaming the integration project for experiment so there is no duplicate error from API * updated docstrings for `integrate_nodes_helper` helper function * fixed `test_integration_computational_process` OrphanedMaterialNode, but having trouble with `CRIPTOrphanedProcessError` * still getting `CRIPTOrphanedProcessError` * process integration test successful! * added comment * removed print statement from test * fixed OrphanedNodeError * added todo * found an issue to fix * adding arguments to complex_condition fixture instantiation * added `simple_condition_node` * wrote `test_integration_process_condition` but getting `CRIPTJsonDeserializationError` * wrote `simple_ingredient_node` fixture * updated keyword for `simple_ingredient_node` fixture * `test_integration_material_ingredient` written but getting `bad UUID API error` * `test_integration_material_ingredient` written but getting `bad UUID API error` * updated docstring for `test_integration_ingredient` * wrote `test_integration_quantity` * fixed `simple_software_configuration` fixture put it in fixtures/subobjects removed it from fixtures/primary_nodes * `test_integration_software_configuration` successful! * adding `simple_software_configuration` fixture * adding `simple_software_configuration` fixture to conftest.py * `test_integration_algorithm` successful! * added description to `simple_equipment_node` fixture * `test_integration_equipment` successful! * `test_integration_parameter` hitting deserialization error * moved around the print statements a bit to make it easier to debug * `test_integration_material_property` successful! * `test_integration_computational_forcefield` successful! * wrote `simplest_computational_process_node` fixture * updated `test_integration_computational_process` * removed print statement from `test_integration_process_condition` * fixed `equipment/test_json` * fixed `test_property/test_json` * fixed `test_software_configuration/test_json` * switching order of print statement for debugging purposes * updated `test_computational_forcefield` and is passing * fix condition integration error: the backend was sending str values instead of numbers * added comment * wrote up design for save_helper.py for `Bad UUID` errors * fix parameter.value type issue with temporary fix * designed brute_force_save * broke save into save and send post request still needs work * put `get_bad_uuid_from_error_message` into a helper function this will make it easier to reuse code and update it when needed * wrote the loop for `brute_force_save` * Bad UUID handling (#186) * differentiate post and patch * add recursive calling for known uuid * minor tweaks * add comments * fix save recur. * test_inventory works * fix uid from back end less destructive * fic spelling mistakes * fix mypy issue (by ignoring them) * fix ingredient material bad API repsonse. * add a node cache to the UUIDBaseNode. This node cache is used to upda… (#189) * add a node cache to the UUIDBaseNode. This node cache is used to update existing UUID nodes, rather then creating a new node with the same UUID. * fix spelling * install requirements dev for tests * add an assert that makes sure to not instantiate a node twice with the same UUID * remove uuid uniqueness assertion again --------- Co-authored-by: nh916 * Refactor the save a little bit. Patch does not work. 
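The UUIDBaseNode node cache mentioned above keeps one live Python object per UUID, so deserializing JSON that references an already-known UUID updates the existing node instead of creating a duplicate. A rough sketch of the mechanism (the method names here are assumptions):

```python
from uuid import uuid4
from weakref import WeakValueDictionary


class UUIDBaseNode:
    """Illustrative node cache: one live Python object per UUID."""

    _node_cache = WeakValueDictionary()

    def __init__(self, **kwargs):
        self.uuid = kwargs.pop("uuid", str(uuid4()))
        self.__dict__.update(kwargs)
        UUIDBaseNode._node_cache[self.uuid] = self

    @classmethod
    def from_json_dict(cls, data: dict) -> "UUIDBaseNode":
        existing = cls._node_cache.get(data.get("uuid", ""))
        if existing is not None:
            existing.__dict__.update(data)   # update in place rather than duplicating
            return existing
        return cls(**data)


a = UUIDBaseNode(name="polystyrene")
b = UUIDBaseNode.from_json_dict({"uuid": a.uuid, "name": "polystyrene (updated)"})
assert a is b and a.name == "polystyrene (updated)"
```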
* extend exception to make handling some things nicer * adjust JSON for patching * wrote `test_integration_software` for test_software.py successfully! * wrote host and token placeholder within conftest.py * removed unused variable * fix cspell * Refactor the save a little bit. Patch does not work. (#190) * fix import * add some further stuff to make it more readable. * fix * fix import * fix mypy warning * convert it to iterative internal save * fix mypy * add regex comments * add comment for error message parsing * add comments * Wrote Integration Tests for Update/PATCH (#197) * starting on update integration tests * changed the line separators to more easily read through the code * changed the line separators to more easily read through the code * updated test_integration.py to be easier to read in terminal * update integration test for test_collection.py * update integration test for test_computation.py * wrote update integration test for test_computational_process.py * wrote update integration test for test_data.py * wrote update integration test for test_experiment.py * wrote update integration test for test_inventory.py * wrote update integration test for test_material.py * updated the update integration test for test_computation.py * updated the update integration test for test_computational_process.py * updated the update integration test for test_data.py * updated the update integration test for test_experiment.py * updated the update integration test for test_collection.py * updated the update integration test for test_inventory.py * updated the update integration test for test_material.py * updated the update integration test for test_material.py * wrote the update integration test for test_project.py * wrote the update integration test for test_process.py * made reference fixture into named arguments * wrote the update integration test for test_reference.py * wrote the update integration test for test_citation.py * wrote the update integration test for test_algorithm.py * updated the update integration test for test_collection.py * updated the update integration test for test_collection.py * updated the update integration test for test_computational_forcefield.py * wrote the update integration test for test_condition.py * wrote the update integration test for test_equipment.py * updated integration update test * wrote the update integration test for test_ingredient.py * cleaned up test_ingredient.py integration test * wrote the update integration test for test_ingredient.py * wrote the update integration test for test_parameter.py * wrote the update integration test for test_property.py * wrote the update quantity integration test cleaned up integration test for quantity so it is easier to read wrote integration test for quantity so the value is changed for the integration test to a unique number * wrote the update integration test for test_software.py * wrote the update integration test for test_software_configuration.py * wrote the update integration test for test_file.py * updated update integration test for test_reference.py * update test_software.py * update test_parameter.py * updated formatting * formatted with black * commented out unused import * mid debugging, but fixed a bug already * updated test_software_configuration.py update integration test * updated docstrings for test_integration.py * formatted with black * formatted with trunk's isort * identified the root of an issue, that I can't fix right now * condense citation.reference and make debugging easier.
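All of the update/PATCH integration tests above follow the same cycle: save the node, change a field to a unique value, save again, fetch it back, and compare. Sketched here with a stand-in in-memory API instead of the real `cript.API`, so the flow is runnable without a backend:

```python
import json
from uuid import uuid4


class FakeAPI:
    """Stand-in for cript.API so the flow below runs without a backend."""

    def __init__(self):
        self._store = {}

    def save(self, node: dict) -> None:            # POST the first time, PATCH afterwards
        self._store[node["uuid"]] = json.loads(json.dumps(node))

    def get(self, uuid: str) -> dict:
        return self._store[uuid]


def integration_update_cycle(api, project: dict) -> None:
    api.save(project)                               # create
    project["notes"] = f"updated {uuid4().hex}"     # unique value, as the quantity test does
    api.save(project)                               # update / PATCH
    fetched = api.get(project["uuid"])
    assert fetched["notes"] == project["notes"]     # what got saved is what comes back


integration_update_cycle(FakeAPI(), {"uuid": str(uuid4()), "node": ["Project"], "name": "my project", "notes": ""})
```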
* undo the reference thing * prevent empty saves * minor problem * remove dependency of paginator for save to work. Use request.get directly * fix search lower to snake_case * paginator fixfix * half way there --------- Co-authored-by: Ludwig Schneider * work around for non-working GET * mypy ignore * fix parameter test * small update * updated db schema to work with `POST` and `PATCH` * updated docstrings * fix trunk * enable is patch for validation * change save validation to respect patch * fix project.validate * revert to working state * commented out test_integration.py for CI * optimized imports --------- Co-authored-by: nh916 * upgraded repository CONTRIBUTING.md (#201) * upgraded repository CONTRIBUTING.md * wrote about * `How to Contribute` * `Issue Submission Guidelines` * `PR Submission Guidelines` * added code of conduct link to CONTRIBUTING.md * formatted with prettier * Update CONTRIBUTING.md * added `pytest` to `requirements_dev.txt` & updated outdated packages (#210) * updated outdated packages * updated outdated packages * added pytest to requirements_dev.txt * added type hints to the search modes enum (#216) * clean up tests.yml GitHub CI (#218) * upgrading tests.yml * putting environment variables within `env` block * changing step names to make them more obvious * installing `requirements_dev.txt` in one step and pytest is a part of `requirements_dev.txt` * running tests with a single command * spacing out the steps to make it clearer and more readable * fixed trunk issue * fixed trunk issue * fixed trunk issue putting the spaces back as they were not the issue * fixed trunk issue * using python3 -m pip install with python3 -m pip install, it will no longer use python2 by default * spelling error * Upgrade: Importing Fixtures with wildcard `*` into conftest.py (#219) * import all fixtures in primary, subobject, and supporting nodes this way it is imported automatically instead of being imported one at a time and if we make a new fixture, we don't have to remember to import it into conftest.py, as it will automatically get imported in * formatted with trunk * ignoring ruff formatting error * optimizing imports with isort * updated documentation for API and Storage tokens (#215) * updated token documentation * ignoring cspell and gitleaks for example JWT ignoring cspell and gitleaks for the example JWT section in the tutorial docs * putting comments around `trunk-ignore` second * putting comments around `trunk-ignore` second * adding screenshot picture as it was missed before * formatted with trunk * fix trunk ignore closing tag * fixing trunk error * fixing trunk error * Knocking out very small `TODO` comments (#220) * changing TODO comment with `pytest.skip` * changed checking db schema length in test_api.py instead of checking if the db schema is the exact same, I am checking if it has more than 30 fields; the db schema is always changing so this test will break often, but it should have more than 30 fields because there are at least 24 nodes * upgraded subobjects.py `simple_property_node` fixture * added docstrings * made instantiation into named arguments * changed the variable name from `p` to `my_property` * still functions exactly the same * renamed fixture * `complex_algorithm_node` was actually minimalistic with only required arguments instead of all arguments * all tests are passing as before * upgraded `simple_property_node` fixture * cleaned up `complex_process_node` fixture * avoiding `deep_copy` as that causes issues * using simple fixtures * to avoid deep_copy
and to make working with a huge node easier * using fixtures instead of remaking nodes * `complex_process` fixture is not being used in any tests * knocking out TODO in file.py * allowing for Path object in api constructor for config file * removed unneeded comment * removing `deep_copy` from `simple_process_node` simple_process_node does not need deep_copy within the fixture as it is more straightforward and all tests work fine without it * wrote test function for `_is_node_field_valid` (#224) wrote test function and test cases for `_is_node_field_valid` * Fix documentation broken links (#225) * fixed broken links in documentation * fixed broken links in experiment.py * formatted attributions table * fixed broken link * UX: Node Validation Terminal Log Output (#221) * wrote a print statement to show things are happening in the background and it is not just standing still * changing the validation terminal feedback from print to log statement the log statement is generally better practice and has better UX * give user the ability to turn off the terminal logs * added comment for `verbose` class variable * formatting for trunk * formatted api.py with black * added `levelname` to cspell for the python logger * wrote documentation for `cript.API.verbose` * trunk update (#227) * code clean up: `cript.API._is_node_schema_valid` getting `node_type` from utility function (#223) * refactoring `cript.API._is_node_schema_valid` taking the getting of node type from JSON and putting it into its own function that can be easily called and make the code cleaner * optimizing imports * ignoring mypy errors not sure how to fix the mypy errors because mypy wants to be sure that `node_json["node"]` is always a list and nothing else, but I cannot convince it of that unless I put if statements all over the code * formatted with black * Docs token security tutorial & warnings (#228) * wrote token security docs * wrote warning for loading tokens directly into scripts * wrote creating api client with env vars * wrote creating api client with `None` * added link to empty link in synthesis.md * fix bad quantity link * removed extra duplicate step from `docs_check.yaml` (#229) * fixed broken link in property node page (#230) * wrote docs for paginator & how to install SDK from GitHub (#233) * wrote docs about paginator and cript_installation_guide.md * changed search to be any `UUIDBaseNode` * removing print statement from `cript.API.search()` (#231) * fix quantity type (#234) * Updated/Upgraded documentation (#235) * added link for where algorithm can be added to * updated config.json fields for SDK API creation * updated `config.json` fields for SDK API creation * formatted attributions table * fixed missing docs for computation_process.py * fixed bad code documentation * fixed bad code documentation * fixed material node documentation description * updated material documentation code * added docs on what a process can be added to * added docs on what reference can be added to * clarified citation.py * updated file node json * clarified file node code * fixed broken link and improved consistency * fix the deep diff comparison. (#236) * fix the deep diff comparison.
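The token-security docs from #228 above come down to keeping credentials out of source files and reading them from the environment instead. Below is a minimal sketch of that idea; the `cript.API` parameter names and the context-manager usage are assumptions here and may differ slightly in the released SDK, which, per the notes above, also accepts `None` and then reads the environment variables itself.

```python
import os

import cript

# Minimal sketch: pull credentials from environment variables instead of
# hard-coding tokens in the script. Parameter names are assumed, not verified.
with cript.API(
    host=os.environ.get("CRIPT_HOST"),
    api_token=os.environ.get("CRIPT_TOKEN"),
    storage_token=os.environ.get("CRIPT_STORAGE_TOKEN"),
) as api:
    print(api)  # quick sanity check that the client connected to the intended host
```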
* refine regex * remove file write output * optimize test output * disable integration tests * fix minor * minor change to get tests passing * Tests integration with `ON/OFF` switch (#237) * created `HAS_INTEGRATION_TESTS_ENABLED` boolean in conftest.py * test_api.py skipping tests if `HAS_INTEGRATION_TESTS_ENABLED` is turned OFF * turning `ON/OFF` integration API test with a simple boolean * skipping some tests in file and uncommenting them * updated `test_user.py` to remove unneeded fixture updated test to use standard fixture * added boolean variable for integration test ON/OFF switch `HAS_INTEGRATION_TESTS_ENABLED` * formatted with trunk * switched `HAS_INTEGRATION_TESTS_ENABLED` to `False` * added vocabulary links to all nodes in documentation (#238) * added python sdk links to Python SDK repo tools * added link for material keyword vocab * added vocab link for data.py * added vocab link for process.py * added vocab links * added vocab links * added vocab links * renamed integration_test_helper file (#240) * upgrade `cript.API.download_file()` docs (#241) * change test coverage requirement from 90% to 89% (#242) Since integration tests are all skipped, the test coverage is lowered to 89.x%. On every PR it is giving a failure notice and lowering it to 89% fixes the issue for now to get a successful response. Not sure if there are any other tests that we can add because we are pretty much covering everything * refactor enum class name from `ControlledVocabularyCategories` to `VocabCategories` (#243) * added link for documentation for vocab categories * refactored `ControlledVocabularyCategories` to `VocabCategories` * changed documentation in controlled_vocabulary_categories.md * changed example documentation * changed how it is imported into __init__.py * changed the test_api.py for it * changed how it is used in api.py * changed how it is used in material_deserialization.py * formatted with trunk * Integration Test Switch, gets value from env var (#239) * added vocab links * testing CI * last commit was for wrong branch * fixed reading boolean from env var str * added `CRIPT_TESTS` env var to test_coverage.yaml * updating requirements and putting `jupytext` into `requirements_docs.txt` (#245) * updating requirements and putting jupytext into requirements_docs.txt because it is only used within documentation * updated and formatted test_examples.yml workflow CI * Update simulation.py documentation code example (#244) * updated documentation with our new code * changed code examples * changed code organization to work with the new SDK * changed text hierarchy to all be h2 * added new headers to separate out the different parts of the tutorial * changed text hierarchy * updated env vars blog article links * made it more consistent * updated material property sub-object code * py to python * changed `CRIPT_TOKEN` to be more common sense * test code error * legacy fix --------- Co-authored-by: Ludwig Schneider * changed conftest.py to get storage token from env variable (#252) * changed conftest.py to get storage token from env variable * updated CI to have `CRIPT_STORAGE_TOKEN` * updated CI to have `CRIPT_STORAGE_TOKEN` * handle empty UUID correctly in JSON, by assigning new UUID (#250) * Wrote documentation for `load_nodes_from_json` (#247) * wrote docs for `load_nodes_from_json` * update docs for `load_nodes_from_json` * added type hinting to `load_nodes_from_json` * update `load_nodes_from_json` function to work correctly without errors * updated docs for `load_from_json` return type *
updated docs for `load_from_json` return type * optimized imports * added documentation for `_NodeDecoderHook` (#248) * added documentation for `_NodeDecoderHook` * fixed `_NodeDecoderHook` `def __init__()` notes * fixed `_NodeDecoderHook` `def __call__()` returns documentation * add trailing slashes to work with staging (#254) * wrote `cript.API.__str__` method (#257) * simplified tests (#260) * simplified tests and got rid of `deep_copy` * optimized imports * passed tests * updated glitchy API documentation side navigation (#258) adding `None` in header escapes the html and looks weird on the right side navigation * updated api search tests for staging and develop (#263) * updated tests for staging * trunk spelling --------- Co-authored-by: Ludwig Schneider * Wrote documentation for `NodeEncoder` class (#249) * wrote documentation for `NodeEncoder` * updated documentation for `NodeEncoder` * updated documentation for `NodeEncoder` * formatted with black * added typing for mypy * formatted with trunk * ignoring mypy static type error * updated documentation return type * formatted with black * adjust type description for condense_uuid --------- Co-authored-by: Ludwig Schneider * Std args json (#262) * add default args to JSON if not present * fix collection integration, by suppressing the right default args in JSON * trunk fixes * found some more * adjust tests * missing test fix * wrote docs explaining host (#264) * change the representation of UID (#251) * change the representation of UID * fix the two tests that were missing * this should work now * missing test fix * conftest get integration test var from env var with exception handling (#253) * updated CI to have `CRIPT_STORAGE_TOKEN` * if no env var then integration tests are true * pass empty identifier list too * uuidfy experiment/data * thx mypy --------- Co-authored-by: nh916 * wrote example code docs for algorithm `type` attribute (#265) * changed CI to use staging host instead of develop (#266) * making `cript.API.search` tests more robust for all environments (#267) * replacing all vocabulary links to new production server (#270) doing this last second * fixing file upload for production AWS S3 server (#269) doing this last second * fixing example documentation code last second (#271) doing this last second --------- Co-authored-by: Ludwig Schneider Co-authored-by: Ardi <64595901+Ardi028@users.noreply.github.com> Co-authored-by: Brilant Kasami --- .github/ISSUE_TEMPLATE/bug_report.md | 33 + .github/ISSUE_TEMPLATE/feature_request.md | 19 + .github/ISSUE_TEMPLATE/simple-issue.md | 9 + .github/pull_request_template.md | 14 + .github/workflows/build_and_deploy_docs.yaml | 21 + .github/workflows/codeql.yml | 78 ++ .github/workflows/dependency-review.yml | 28 + .github/workflows/docs_check.yaml | 38 + .github/workflows/mypy.yaml | 40 + .github/workflows/test_coverage.yaml | 49 + .github/workflows/test_examples.yml | 46 + .github/workflows/tests.yml | 48 + .github/workflows/trunk.yml | 23 + .gitignore | 48 + .trunk/.gitignore | 8 + .trunk/configs/.cspell.json | 107 ++ .trunk/configs/.isort.cfg | 2 + .trunk/configs/.markdownlint.yaml | 12 + .trunk/configs/.yamllint.yaml | 10 + .trunk/configs/svgo.config.js | 14 + .trunk/trunk.yaml | 53 + CODE_OF_CONDUCT.md | 128 +++ CONTRIBUTING.md | 83 ++ CONTRIBUTORS.md | 7 + CRIPT_full_logo_colored_transparent.png | Bin 0 -> 30023 bytes LICENSE.md | 21 + README.md | 60 +- SECURITY.md | 15 + docs/api/api.md | 1 + docs/api/controlled_vocabulary_categories.md | 1 + docs/api/paginator.md | 1 +
docs/api/search_modes.md | 1 + docs/examples/.gitignore | 2 + docs/examples/simulation.md | 392 ++++++++ docs/examples/synthesis.md | 296 ++++++ docs/exceptions/api_exceptions.md | 3 + docs/exceptions/node_exceptions.md | 3 + docs/extra.css | 3 + docs/faq.md | 87 ++ .../CRIPT_full_logo_colored_transparent.png | Bin 0 -> 30023 bytes docs/images/cript_token_page.png | Bin 0 -> 72761 bytes docs/images/favicon.ico | Bin 0 -> 15406 bytes docs/index.md | 27 + docs/nodes/primary_nodes/base_node.md | 1 + docs/nodes/primary_nodes/collection.md | 1 + docs/nodes/primary_nodes/computation.md | 1 + .../primary_nodes/computation_process.md | 1 + docs/nodes/primary_nodes/data.md | 1 + docs/nodes/primary_nodes/experiment.md | 1 + docs/nodes/primary_nodes/inventory.md | 1 + docs/nodes/primary_nodes/material.md | 1 + docs/nodes/primary_nodes/process.md | 1 + docs/nodes/primary_nodes/project.md | 1 + docs/nodes/primary_nodes/reference.md | 1 + docs/nodes/primary_nodes/software.md | 1 + docs/nodes/subobjects/algorithm.md | 1 + docs/nodes/subobjects/citation.md | 1 + .../subobjects/computational_forcefield.md | 1 + docs/nodes/subobjects/condition.md | 1 + docs/nodes/subobjects/equipment.md | 1 + docs/nodes/subobjects/identifier.md | 1 + docs/nodes/subobjects/ingredient.md | 1 + docs/nodes/subobjects/parameter.md | 1 + docs/nodes/subobjects/property.md | 1 + docs/nodes/subobjects/quantity.md | 1 + .../subobjects/software_configuration.md | 1 + docs/nodes/supporting_nodes/file.md | 1 + docs/nodes/supporting_nodes/group.md | 1 + docs/nodes/supporting_nodes/user.md | 1 + docs/tutorial/cript_installation_guide.md | 55 ++ docs/tutorial/how_to_get_api_token.md | 43 + docs/utility_functions.md | 1 + mkdocs.yml | 135 +++ pyproject.toml | 26 + requirements.txt | 4 + requirements_dev.txt | 10 + requirements_docs.txt | 5 + setup.cfg | 32 + setup.py | 4 + src/cript/__init__.py | 40 + src/cript/api/__init__.py | 5 + src/cript/api/api.py | 918 ++++++++++++++++++ src/cript/api/exceptions.py | 209 ++++ src/cript/api/paginator.py | 221 +++++ src/cript/api/utils/__init__.py | 4 + src/cript/api/utils/get_host_token.py | 62 ++ src/cript/api/utils/helper_functions.py | 43 + src/cript/api/utils/save_helper.py | 164 ++++ src/cript/api/utils/web_file_downloader.py | 97 ++ src/cript/api/valid_search_modes.py | 35 + src/cript/api/vocabulary_categories.py | 99 ++ src/cript/exceptions.py | 12 + src/cript/nodes/__init__.py | 32 + src/cript/nodes/core.py | 456 +++++++++ src/cript/nodes/exceptions.py | 411 ++++++++ src/cript/nodes/primary_nodes/__init__.py | 11 + src/cript/nodes/primary_nodes/collection.py | 284 ++++++ src/cript/nodes/primary_nodes/computation.py | 456 +++++++++ .../primary_nodes/computation_process.py | 589 +++++++++++ src/cript/nodes/primary_nodes/data.py | 431 ++++++++ src/cript/nodes/primary_nodes/experiment.py | 402 ++++++++ src/cript/nodes/primary_nodes/inventory.py | 148 +++ src/cript/nodes/primary_nodes/material.py | 445 +++++++++ .../nodes/primary_nodes/primary_base_node.py | 119 +++ src/cript/nodes/primary_nodes/process.py | 590 +++++++++++ src/cript/nodes/primary_nodes/project.py | 232 +++++ src/cript/nodes/primary_nodes/reference.py | 689 +++++++++++++ src/cript/nodes/subobjects/__init__.py | 12 + src/cript/nodes/subobjects/algorithm.py | 268 +++++ src/cript/nodes/subobjects/citation.py | 201 ++++ .../subobjects/computational_forcefield.py | 478 +++++++++ src/cript/nodes/subobjects/condition.py | 537 ++++++++++ src/cript/nodes/subobjects/equipment.py | 327 +++++++ src/cript/nodes/subobjects/ingredient.py | 222 
+++++ src/cript/nodes/subobjects/parameter.py | 221 +++++ src/cript/nodes/subobjects/property.py | 753 ++++++++++++++ src/cript/nodes/subobjects/quantity.py | 258 +++++ src/cript/nodes/subobjects/software.py | 194 ++++ .../subobjects/software_configuration.py | 286 ++++++ src/cript/nodes/supporting_nodes/__init__.py | 3 + src/cript/nodes/supporting_nodes/file.py | 446 +++++++++ src/cript/nodes/supporting_nodes/user.py | 150 +++ src/cript/nodes/util/__init__.py | 508 ++++++++++ .../nodes/util/material_deserialization.py | 74 ++ src/cript/nodes/uuid_base.py | 76 ++ tests/api/test_api.py | 412 ++++++++ tests/conftest.py | 56 ++ tests/fixtures/primary_nodes.py | 321 ++++++ tests/fixtures/subobjects.py | 371 +++++++ tests/fixtures/supporting_nodes.py | 35 + tests/integration_test_helper.py | 98 ++ tests/nodes/primary_nodes/test_collection.py | 170 ++++ tests/nodes/primary_nodes/test_computation.py | 130 +++ .../test_computational_process.py | 133 +++ tests/nodes/primary_nodes/test_data.py | 167 ++++ tests/nodes/primary_nodes/test_experiment.py | 214 ++++ tests/nodes/primary_nodes/test_inventory.py | 72 ++ tests/nodes/primary_nodes/test_material.py | 134 +++ tests/nodes/primary_nodes/test_process.py | 192 ++++ tests/nodes/primary_nodes/test_project.py | 79 ++ tests/nodes/primary_nodes/test_reference.py | 196 ++++ tests/nodes/subobjects/test_algorithm.py | 53 + tests/nodes/subobjects/test_citation.py | 50 + .../test_computational_forcefiled.py | 68 ++ tests/nodes/subobjects/test_condition.py | 71 ++ tests/nodes/subobjects/test_equipment.py | 62 ++ tests/nodes/subobjects/test_ingredient.py | 68 ++ tests/nodes/subobjects/test_parameter.py | 53 + tests/nodes/subobjects/test_property.py | 85 ++ tests/nodes/subobjects/test_quantity.py | 58 ++ tests/nodes/subobjects/test_software.py | 68 ++ .../subobjects/test_software_configuration.py | 63 ++ tests/nodes/supporting_nodes/test_file.py | 220 +++++ tests/nodes/supporting_nodes/test_user.py | 55 ++ tests/nodes/test_utils.py | 18 + tests/test_node_util.py | 325 +++++++ tests/util.py | 21 + trunk | 442 +++++++++ 158 files changed, 18937 insertions(+), 13 deletions(-) create mode 100644 .github/ISSUE_TEMPLATE/bug_report.md create mode 100644 .github/ISSUE_TEMPLATE/feature_request.md create mode 100644 .github/ISSUE_TEMPLATE/simple-issue.md create mode 100644 .github/pull_request_template.md create mode 100644 .github/workflows/build_and_deploy_docs.yaml create mode 100644 .github/workflows/codeql.yml create mode 100644 .github/workflows/dependency-review.yml create mode 100644 .github/workflows/docs_check.yaml create mode 100644 .github/workflows/mypy.yaml create mode 100644 .github/workflows/test_coverage.yaml create mode 100644 .github/workflows/test_examples.yml create mode 100644 .github/workflows/tests.yml create mode 100644 .github/workflows/trunk.yml create mode 100644 .gitignore create mode 100644 .trunk/.gitignore create mode 100644 .trunk/configs/.cspell.json create mode 100644 .trunk/configs/.isort.cfg create mode 100644 .trunk/configs/.markdownlint.yaml create mode 100644 .trunk/configs/.yamllint.yaml create mode 100644 .trunk/configs/svgo.config.js create mode 100644 .trunk/trunk.yaml create mode 100644 CODE_OF_CONDUCT.md create mode 100644 CONTRIBUTING.md create mode 100644 CONTRIBUTORS.md create mode 100644 CRIPT_full_logo_colored_transparent.png create mode 100644 LICENSE.md create mode 100644 SECURITY.md create mode 100644 docs/api/api.md create mode 100644 docs/api/controlled_vocabulary_categories.md create mode 100644 docs/api/paginator.md 
create mode 100644 docs/api/search_modes.md create mode 100644 docs/examples/.gitignore create mode 100644 docs/examples/simulation.md create mode 100644 docs/examples/synthesis.md create mode 100644 docs/exceptions/api_exceptions.md create mode 100644 docs/exceptions/node_exceptions.md create mode 100644 docs/extra.css create mode 100644 docs/faq.md create mode 100644 docs/images/CRIPT_full_logo_colored_transparent.png create mode 100644 docs/images/cript_token_page.png create mode 100644 docs/images/favicon.ico create mode 100644 docs/index.md create mode 100644 docs/nodes/primary_nodes/base_node.md create mode 100644 docs/nodes/primary_nodes/collection.md create mode 100644 docs/nodes/primary_nodes/computation.md create mode 100644 docs/nodes/primary_nodes/computation_process.md create mode 100644 docs/nodes/primary_nodes/data.md create mode 100644 docs/nodes/primary_nodes/experiment.md create mode 100644 docs/nodes/primary_nodes/inventory.md create mode 100644 docs/nodes/primary_nodes/material.md create mode 100644 docs/nodes/primary_nodes/process.md create mode 100644 docs/nodes/primary_nodes/project.md create mode 100644 docs/nodes/primary_nodes/reference.md create mode 100644 docs/nodes/primary_nodes/software.md create mode 100644 docs/nodes/subobjects/algorithm.md create mode 100644 docs/nodes/subobjects/citation.md create mode 100644 docs/nodes/subobjects/computational_forcefield.md create mode 100644 docs/nodes/subobjects/condition.md create mode 100644 docs/nodes/subobjects/equipment.md create mode 100644 docs/nodes/subobjects/identifier.md create mode 100644 docs/nodes/subobjects/ingredient.md create mode 100644 docs/nodes/subobjects/parameter.md create mode 100644 docs/nodes/subobjects/property.md create mode 100644 docs/nodes/subobjects/quantity.md create mode 100644 docs/nodes/subobjects/software_configuration.md create mode 100644 docs/nodes/supporting_nodes/file.md create mode 100644 docs/nodes/supporting_nodes/group.md create mode 100644 docs/nodes/supporting_nodes/user.md create mode 100644 docs/tutorial/cript_installation_guide.md create mode 100644 docs/tutorial/how_to_get_api_token.md create mode 100644 docs/utility_functions.md create mode 100644 mkdocs.yml create mode 100644 pyproject.toml create mode 100644 requirements.txt create mode 100644 requirements_dev.txt create mode 100644 requirements_docs.txt create mode 100644 setup.cfg create mode 100644 setup.py create mode 100644 src/cript/__init__.py create mode 100644 src/cript/api/__init__.py create mode 100644 src/cript/api/api.py create mode 100644 src/cript/api/exceptions.py create mode 100644 src/cript/api/paginator.py create mode 100644 src/cript/api/utils/__init__.py create mode 100644 src/cript/api/utils/get_host_token.py create mode 100644 src/cript/api/utils/helper_functions.py create mode 100644 src/cript/api/utils/save_helper.py create mode 100644 src/cript/api/utils/web_file_downloader.py create mode 100644 src/cript/api/valid_search_modes.py create mode 100644 src/cript/api/vocabulary_categories.py create mode 100644 src/cript/exceptions.py create mode 100644 src/cript/nodes/__init__.py create mode 100644 src/cript/nodes/core.py create mode 100644 src/cript/nodes/exceptions.py create mode 100644 src/cript/nodes/primary_nodes/__init__.py create mode 100644 src/cript/nodes/primary_nodes/collection.py create mode 100644 src/cript/nodes/primary_nodes/computation.py create mode 100644 src/cript/nodes/primary_nodes/computation_process.py create mode 100644 src/cript/nodes/primary_nodes/data.py create mode 
100644 src/cript/nodes/primary_nodes/experiment.py create mode 100644 src/cript/nodes/primary_nodes/inventory.py create mode 100644 src/cript/nodes/primary_nodes/material.py create mode 100644 src/cript/nodes/primary_nodes/primary_base_node.py create mode 100644 src/cript/nodes/primary_nodes/process.py create mode 100644 src/cript/nodes/primary_nodes/project.py create mode 100644 src/cript/nodes/primary_nodes/reference.py create mode 100644 src/cript/nodes/subobjects/__init__.py create mode 100644 src/cript/nodes/subobjects/algorithm.py create mode 100644 src/cript/nodes/subobjects/citation.py create mode 100644 src/cript/nodes/subobjects/computational_forcefield.py create mode 100644 src/cript/nodes/subobjects/condition.py create mode 100644 src/cript/nodes/subobjects/equipment.py create mode 100644 src/cript/nodes/subobjects/ingredient.py create mode 100644 src/cript/nodes/subobjects/parameter.py create mode 100644 src/cript/nodes/subobjects/property.py create mode 100644 src/cript/nodes/subobjects/quantity.py create mode 100644 src/cript/nodes/subobjects/software.py create mode 100644 src/cript/nodes/subobjects/software_configuration.py create mode 100644 src/cript/nodes/supporting_nodes/__init__.py create mode 100644 src/cript/nodes/supporting_nodes/file.py create mode 100644 src/cript/nodes/supporting_nodes/user.py create mode 100644 src/cript/nodes/util/__init__.py create mode 100644 src/cript/nodes/util/material_deserialization.py create mode 100644 src/cript/nodes/uuid_base.py create mode 100644 tests/api/test_api.py create mode 100644 tests/conftest.py create mode 100644 tests/fixtures/primary_nodes.py create mode 100644 tests/fixtures/subobjects.py create mode 100644 tests/fixtures/supporting_nodes.py create mode 100644 tests/integration_test_helper.py create mode 100644 tests/nodes/primary_nodes/test_collection.py create mode 100644 tests/nodes/primary_nodes/test_computation.py create mode 100644 tests/nodes/primary_nodes/test_computational_process.py create mode 100644 tests/nodes/primary_nodes/test_data.py create mode 100644 tests/nodes/primary_nodes/test_experiment.py create mode 100644 tests/nodes/primary_nodes/test_inventory.py create mode 100644 tests/nodes/primary_nodes/test_material.py create mode 100644 tests/nodes/primary_nodes/test_process.py create mode 100644 tests/nodes/primary_nodes/test_project.py create mode 100644 tests/nodes/primary_nodes/test_reference.py create mode 100644 tests/nodes/subobjects/test_algorithm.py create mode 100644 tests/nodes/subobjects/test_citation.py create mode 100644 tests/nodes/subobjects/test_computational_forcefiled.py create mode 100644 tests/nodes/subobjects/test_condition.py create mode 100644 tests/nodes/subobjects/test_equipment.py create mode 100644 tests/nodes/subobjects/test_ingredient.py create mode 100644 tests/nodes/subobjects/test_parameter.py create mode 100644 tests/nodes/subobjects/test_property.py create mode 100644 tests/nodes/subobjects/test_quantity.py create mode 100644 tests/nodes/subobjects/test_software.py create mode 100644 tests/nodes/subobjects/test_software_configuration.py create mode 100644 tests/nodes/supporting_nodes/test_file.py create mode 100644 tests/nodes/supporting_nodes/test_user.py create mode 100644 tests/nodes/test_utils.py create mode 100644 tests/test_node_util.py create mode 100644 tests/util.py create mode 100755 trunk diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 000000000..dd00c0845 --- /dev/null +++ 
b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,33 @@ +--- +name: Bug report +about: Create a report to help us improve +title: "" +labels: "" +assignees: "" +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior: + +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. See error + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Screenshots** +If applicable, add screenshots to help explain your problem. + +**Desktop (please complete the following information):** + +- OS: [e.g. iOS] +- Browser [e.g. chrome, safari] +- Version [e.g. 22] + +**Additional context** +Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 000000000..2bc5d5f71 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,19 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: "" +labels: "" +assignees: "" +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. diff --git a/.github/ISSUE_TEMPLATE/simple-issue.md b/.github/ISSUE_TEMPLATE/simple-issue.md new file mode 100644 index 000000000..80f542efe --- /dev/null +++ b/.github/ISSUE_TEMPLATE/simple-issue.md @@ -0,0 +1,9 @@ +--- +name: Simple Issue +about: Describe the issue +title: +labels: "" +assignees: "" +--- + +## Description diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 000000000..da8255dfa --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,14 @@ +# Description + +## Changes + +## Tests + +## Known Issues + +## Notes + +## Checklist + +- [ ] My name is on the list of contributors (`CONTRIBUTORS.md`) in the pull request source branch. +- [ ] I have updated the documentation to reflect my changes. diff --git a/.github/workflows/build_and_deploy_docs.yaml b/.github/workflows/build_and_deploy_docs.yaml new file mode 100644 index 000000000..e0b1fc7a9 --- /dev/null +++ b/.github/workflows/build_and_deploy_docs.yaml @@ -0,0 +1,21 @@ +# build docs from master branch and push to gh-pages branch to be deployed to repository GitHub pages + +name: Build & Deploy Docs +on: + push: + branches: + - develop + + # trunk-ignore(yamllint/empty-values) + workflow_dispatch: + +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: 3.x + - run: pip install -r requirements_docs.txt + - run: mkdocs gh-deploy --force diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 000000000..0cc7fe0ab --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,78 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. 
+# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. +# +name: CodeQL + +on: + push: + branches: + - develop + - main + - trunk-merge/** + pull_request: + # The branches below must be a subset of the branches above + branches: [develop] + schedule: + - cron: 19 15 * * 2 + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [python] + # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] + # Use only 'java' to analyze code written in Java, Kotlin or both + # Use only 'javascript' to analyze code written in JavaScript, TypeScript or both + # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + + # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + # queries: security-extended,security-and-quality + + # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v2 + + # ℹ️ Command-line programs to run using the OS shell. + # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun + + # If the Autobuild fails above, remove it and uncomment the following three lines. + # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. + + # - run: | + # echo "Run, Build Application using script" + # ./location_of_script_within_repo/buildscript.sh + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 + with: + category: /language:${{matrix.language}} diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml new file mode 100644 index 000000000..0b6d4d1fa --- /dev/null +++ b/.github/workflows/dependency-review.yml @@ -0,0 +1,28 @@ +# Dependency Review Action +# +# This Action will scan dependency manifest files that change as part of a Pull Request, surfacing known-vulnerable versions of the packages declared or updated in the PR. Once installed, if the workflow run is marked as required, PRs introducing known-vulnerable packages will be blocked from merging. 
+# +# Source repository: https://github.com/actions/dependency-review-action +# Public documentation: https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-dependency-review#dependency-review-enforcement +name: Dependency Review +on: + push: + branches: + - main + - develop + pull_request: + branches: + - develop + - main + +permissions: + contents: read + +jobs: + dependency-review: + runs-on: ubuntu-latest + steps: + - name: Checkout Repository + uses: actions/checkout@v3 + - name: Dependency Review + uses: actions/dependency-review-action@v2 diff --git a/.github/workflows/docs_check.yaml b/.github/workflows/docs_check.yaml new file mode 100644 index 000000000..627d2c932 --- /dev/null +++ b/.github/workflows/docs_check.yaml @@ -0,0 +1,38 @@ +# this CI workflow checks the documentation for any broken links or errors within documentation files/configuration +# and reports errors to catch errors and never deploy broken documentation +name: MkDocs CI Check + +on: + push: + branches: + - main + - develop + - "*" + - trunk-merge/** + pull_request: + branches: + - main + - develop + - "*" + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - name: Checkout Repository + uses: actions/checkout@v2 + + - name: Set Up Python + uses: actions/setup-python@v2 + with: + python-version: 3.11 + + - name: Install Python SDK + run: pip install -e . + + - name: Install Doc Dependencies + run: pip install -r requirements_docs.txt + + - name: Build and Test Documentation + run: mkdocs build diff --git a/.github/workflows/mypy.yaml b/.github/workflows/mypy.yaml new file mode 100644 index 000000000..91c2a054c --- /dev/null +++ b/.github/workflows/mypy.yaml @@ -0,0 +1,40 @@ +# check code types with mypy to be sure the static types are correct and make sense + +name: MyPy Check + +on: + push: + branches: + - main + - develop + - trunk-merge/** + pull_request: + branches: + - main + - develop + +jobs: + mypy-test: + strategy: + matrix: + python-version: [3.11] + os: [ubuntu-latest] + + runs-on: ${{ matrix.os }} + + steps: + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Check out code + uses: actions/checkout@v2 + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements_dev.txt + + - name: Run MyPy + run: mypy src/cript/ diff --git a/.github/workflows/test_coverage.yaml b/.github/workflows/test_coverage.yaml new file mode 100644 index 000000000..125f357b9 --- /dev/null +++ b/.github/workflows/test_coverage.yaml @@ -0,0 +1,49 @@ +# use pytest-cov to see what percentage of the code is being covered by tests +# WARNING: this workflow will fail if any of the tests within it fail + +name: Test Coverage + +on: + push: + branches: + - main + - develop + - trunk-merge/** + pull_request: + branches: + - main + - develop + +jobs: + test-coverage: + runs-on: ubuntu-latest + strategy: + matrix: + os: [ubuntu-latest] + python-version: [3.11] + + env: + CRIPT_HOST: https://lb-stage.mycriptapp.org/ + CRIPT_TOKEN: 125433546 + CRIPT_STORAGE_TOKEN: 987654321 + CRIPT_TESTS: False + + steps: + - uses: actions/checkout@v2 + + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: 3.11 + + - name: upgrade pip + run: pip install --upgrade pip + + - name: Install CRIPT Python SDK + run: pip install -e . 
+ + - name: Install requirements_dev.txt + run: pip install -r requirements_dev.txt + + - name: Test Coverage + run: pytest --cov --cov-fail-under=89 diff --git a/.github/workflows/test_examples.yml b/.github/workflows/test_examples.yml new file mode 100644 index 000000000..be4024782 --- /dev/null +++ b/.github/workflows/test_examples.yml @@ -0,0 +1,46 @@ +name: Test Jupyter Notebook + +on: + push: + branches: + - main + - develop + - trunk-merge/** + pull_request: + branches: + - main + - develop + +jobs: + test-examples: + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + python-version: [3.11] + + env: + CRIPT_HOST: https://lb-stage.mycriptapp.org/ + CRIPT_TOKEN: 123456789 + CRIPT_STORAGE_TOKEN: 987654321 + + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: install test dependency + run: python3 -m pip install -r requirements_docs.txt + + - name: install module + run: python3 -m pip install . + + - name: prepare notebook + run: | + jupytext --to py docs/examples/synthesis.md + jupytext --to py docs/examples/simulation.md + + - name: Run script + run: | + python3 docs/examples/synthesis.py + python3 docs/examples/simulation.py diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 000000000..4ed890252 --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,48 @@ +# Runs all the Python SDK tests within the `tests/` directory to check our code + +name: Tests + +on: + # trunk-ignore(yamllint/empty-values) + workflow_dispatch: + + push: + branches: + - main + - develop + - trunk-merge/** + pull_request: + branches: + - main + - develop + - "*" + +jobs: + install: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + + matrix: + os: [ubuntu-latest, macos-latest] + python-version: [3.7, 3.11] + + env: + CRIPT_HOST: https://lb-stage.mycriptapp.org/ + CRIPT_TOKEN: 123456789 + CRIPT_STORAGE_TOKEN: 987654321 + CRIPT_TESTS: False + + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: pip install CRIPT Python SDK local package + run: python3 -m pip install . 
+ + - name: pip install requirements_dev.txt + run: python3 -m pip install -r requirements_dev.txt + + - name: Run pytest on tests/ + run: pytest ./tests/ diff --git a/.github/workflows/trunk.yml b/.github/workflows/trunk.yml new file mode 100644 index 000000000..c528f3b14 --- /dev/null +++ b/.github/workflows/trunk.yml @@ -0,0 +1,23 @@ +name: CI + +on: + push: + branches: + - main + - develop + - trunk-merge/** + pull_request: + branches: + - main + - develop + - "*" + +jobs: + trunk: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Trunk Check + uses: trunk-io/trunk-action@v1 diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..14651984c --- /dev/null +++ b/.gitignore @@ -0,0 +1,48 @@ +# jet brains IDE config directory +.idea/ + +# vscode config directory +.vscode/ + +# ignore virtual environments +.env +.venv +config.json +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# pycache +__pycache__/ + +# distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg + +# ignore mypy cache +.mypy_cache/ + +# pytest cache +.pytest_cache + +# ignore coverage.py files and directories +.coverage +/.coverage +htmlcov/ \ No newline at end of file diff --git a/.trunk/.gitignore b/.trunk/.gitignore new file mode 100644 index 000000000..695b51906 --- /dev/null +++ b/.trunk/.gitignore @@ -0,0 +1,8 @@ +*out +*logs +*actions +*notifications +plugins +user_trunk.yaml +user.yaml +tools diff --git a/.trunk/configs/.cspell.json b/.trunk/configs/.cspell.json new file mode 100644 index 000000000..f9dd36d6e --- /dev/null +++ b/.trunk/configs/.cspell.json @@ -0,0 +1,107 @@ +{ + "words": [ + "CRIPT", + "cript", + "orcid", + "uid", + "uids", + "barostat", + "sortkeys", + "forcefield", + "opls", + "issn", + "arxiv", + "pmid", + "ISSN", + "bigsmiles", + "funders", + "Elsevier", + "Müller", + "berendsen", + "setter", + "fwhm", + "ASTM", + "lammps", + "LAMMPS", + "pyclass", + "rdist", + "subobject", + "Subobject", + "fcller", + "criptapp", + "pubchem", + "Chlorophenyl", + "dichlorobenzoate", + "Dichloro", + "methylbutyl", + "benzamide", + "Chlorophenyl", + "dichlorobenzoate", + "polyacrylate", + "JSPS", + "subobjects", + "forcefields", + "LBCC", + "GROMACS", + "CHARMM", + "Forcefields", + "Debye", + "FTIR", + "Szwarc", + "homopolymer", + "polyolefins", + "hydrogels", + "polyisoprene", + "polystyrene", + "mcherry", + "autouse", + "CRIPTAPI", + "Verlet", + "pytest", + "mkdocs", + "docstrings", + "jsonschema", + "devs", + "unwritable", + "docstrings", + "runtimes", + "timestep", + "TLDR", + "codeql", + "Autobuild", + "buildscript", + "markdownlint", + "Numpy", + "ipynb", + "boto", + "beartype", + "mypy", + "ipynb", + "jupytext", + "kernelspec", + "OCCCC", + "endregion", + "vinylbenzene", + "multistep", + "mmol", + "inchi", + "LRHPLDYGYMQRHN", + "UHFFFAOYSA", + "Butan", + "Butyric", + "Methylolpropane", + "fontawesome", + "venv", + "deepdiff", + "rdkit", + "packmol", + "Packmol", + "openmm", + "equi", + "Navid", + "ipykernel", + "levelname", + "enylcyclopent", + "Polybeccarine" + ] +} diff --git a/.trunk/configs/.isort.cfg b/.trunk/configs/.isort.cfg new file mode 100644 index 000000000..b9fb3f3e8 --- /dev/null +++ b/.trunk/configs/.isort.cfg @@ -0,0 +1,2 @@ +[settings] +profile=black diff --git a/.trunk/configs/.markdownlint.yaml b/.trunk/configs/.markdownlint.yaml new file mode 100644 index 000000000..276c23b5f --- /dev/null +++ 
b/.trunk/configs/.markdownlint.yaml @@ -0,0 +1,12 @@ +# Autoformatter friendly markdownlint config (all formatting rules disabled) +default: true +blank_lines: false +bullet: false +html: false +indentation: false +line_length: false +spaces: false +url: false +whitespace: false +MD041: false +MD046: false diff --git a/.trunk/configs/.yamllint.yaml b/.trunk/configs/.yamllint.yaml new file mode 100644 index 000000000..4d444662d --- /dev/null +++ b/.trunk/configs/.yamllint.yaml @@ -0,0 +1,10 @@ +rules: + quoted-strings: + required: only-when-needed + extra-allowed: ["{|}"] + empty-values: + forbid-in-block-mappings: true + forbid-in-flow-mappings: true + key-duplicates: {} + octal-values: + forbid-implicit-octal: true diff --git a/.trunk/configs/svgo.config.js b/.trunk/configs/svgo.config.js new file mode 100644 index 000000000..b257d1349 --- /dev/null +++ b/.trunk/configs/svgo.config.js @@ -0,0 +1,14 @@ +module.exports = { + plugins: [ + { + name: "preset-default", + params: { + overrides: { + removeViewBox: false, // https://github.com/svg/svgo/issues/1128 + sortAttrs: true, + removeOffCanvasPaths: true, + }, + }, + }, + ], +}; diff --git a/.trunk/trunk.yaml b/.trunk/trunk.yaml new file mode 100644 index 000000000..91d4b99a9 --- /dev/null +++ b/.trunk/trunk.yaml @@ -0,0 +1,53 @@ +version: 0.1 +cli: + version: 1.13.0 +plugins: + sources: + - id: trunk + ref: v1.0.0 + uri: https://github.com/trunk-io/plugins +lint: + enabled: + - svgo@3.0.2 + - cspell@6.31.2 + - actionlint@1.6.25 + - black@23.7.0 + - git-diff-check + - gitleaks@8.17.0 + - isort@5.12.0 + - markdownlint@0.35.0 + - oxipng@8.0.0 + - prettier@3.0.0 + - ruff@0.0.280 + - taplo@0.8.1 + - yamllint@1.32.0 + ignore: + - linters: [prettier] + paths: + - site/** + - docs/** +runtimes: + enabled: + - go@1.19.5 + - node@18.12.1 + - python@3.10.8 +actions: + enabled: + - trunk-announce + - trunk-check-pre-push + - trunk-fmt-pre-commit + - trunk-upgrade-available +merge: + required_statuses: + - trunk + - Analyze (python) + - build + - install (ubuntu-latest, 3.7) + - install (ubuntu-latest, 3.11) + - install (macos-latest, 3.7) + - install (macos-latest, 3.11) + - test-coverage (ubuntu-latest, 3.7) + - test-coverage (ubuntu-latest, 3.11) + - mypy-test (3.7, ubuntu-latest) + - mypy-test (3.11, ubuntu-latest) + - test-examples (ubuntu-latest, 3.11) diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 000000000..93259366c --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,128 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our +community a harassment-free experience for everyone, regardless of age, body +size, visible or invisible disability, ethnicity, sex characteristics, gender +identity and expression, level of experience, education, socio-economic status, +nationality, personal appearance, race, religion, or sexual identity +and orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, +diverse, inclusive, and healthy community. 
+ +## Our Standards + +Examples of behavior that contributes to a positive environment for our +community include: + +- Demonstrating empathy and kindness toward other people +- Being respectful of differing opinions, viewpoints, and experiences +- Giving and gracefully accepting constructive feedback +- Accepting responsibility and apologizing to those affected by our mistakes, + and learning from the experience +- Focusing on what is best not just for us as individuals, but for the + overall community + +Examples of unacceptable behavior include: + +- The use of sexualized language or imagery, and sexual attention or + advances of any kind +- Trolling, insulting or derogatory comments, and personal or political attacks +- Public or private harassment +- Publishing others' private information, such as a physical or email + address, without their explicit permission +- Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in +response to any behavior that they deem inappropriate, threatening, offensive, +or harmful. + +Community leaders have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, and will communicate reasons for moderation +decisions when appropriate. + +## Scope + +This Code of Conduct applies within all community spaces, and also applies when +an individual is officially representing the community in public spaces. +Examples of representing our community include using an official e-mail address, +posting via an official social media account, or acting as an appointed +representative at an online or offline event. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported to the community leaders responsible for enforcement at +cript_report@mit.edu. +All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the +reporter of any incident. + +## Enforcement Guidelines + +Community leaders will follow these Community Impact Guidelines in determining +the consequences for any action they deem in violation of this Code of Conduct: + +### 1. Correction + +**Community Impact**: Use of inappropriate language or other behavior deemed +unprofessional or unwelcome in the community. + +**Consequence**: A private, written warning from community leaders, providing +clarity around the nature of the violation and an explanation of why the +behavior was inappropriate. A public apology may be requested. + +### 2. Warning + +**Community Impact**: A violation through a single incident or series +of actions. + +**Consequence**: A warning with consequences for continued behavior. No +interaction with the people involved, including unsolicited interaction with +those enforcing the Code of Conduct, for a specified period of time. This +includes avoiding interactions in community spaces as well as external channels +like social media. Violating these terms may lead to a temporary or +permanent ban. + +### 3. Temporary Ban + +**Community Impact**: A serious violation of community standards, including +sustained inappropriate behavior. 
+ +**Consequence**: A temporary ban from any sort of interaction or public +communication with the community for a specified period of time. No public or +private interaction with the people involved, including unsolicited interaction +with those enforcing the Code of Conduct, is allowed during this period. +Violating these terms may lead to a permanent ban. + +### 4. Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community +standards, including sustained inappropriate behavior, harassment of an +individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within +the community. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], +version 2.0, available at +https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. + +Community Impact Guidelines were inspired by [Mozilla's code of conduct +enforcement ladder](https://github.com/mozilla/diversity). + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see the FAQ at +https://www.contributor-covenant.org/faq. Translations are available at +https://www.contributor-covenant.org/translations. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 000000000..1c6cff44e --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,83 @@ +# Repository Contributing Guidelines + +Welcome to our GitHub repository! We appreciate your interest in contributing to our project. +We value the collaborative spirit of the open-source community and would love to have your contributions. +This document outlines the guidelines to help you get started. +For more detailed information, please refer to our wiki section. + +## How to Contribute + +1. Fork the repository [develop branch](https://github.com/C-Accel-CRIPT/Python-SDK/tree/develop) to your GitHub + account. + > [main branch](https://github.com/C-Accel-CRIPT/Python-SDK/tree/main) tries to mirror the CRIPT Pypi package, + > [develop branch](https://github.com/C-Accel-CRIPT/Python-SDK/tree/develop) has all the latest developments waiting + > for release +2. Create a new branch in your forked repository. Choose a descriptive name that summarizes your contribution. +3. Make the necessary changes or additions to the codebase. +4. Test your changes thoroughly to ensure they don't introduce any issues. +5. Commit your changes with a clear and concise commit message. +6. Push the changes to your forked repository. +7. Open a pull request (PR) in our repository to propose your changes. + > Please be sure to merge all of your incoming changes to the + > [develop branch](https://github.com/C-Accel-CRIPT/Python-SDK/tree/develop), we only update the + > [main branch](https://github.com/C-Accel-CRIPT/Python-SDK/tree/main) when going to make a release by + > merging [develop branch](https://github.com/C-Accel-CRIPT/Python-SDK/tree/develop) into main. + > For more information, please refer to + > [repository guidelines wiki](https://github.com/C-Accel-CRIPT/Python-SDK/wiki/Repository-Guidelines) + > and [deployment wiki](https://github.com/C-Accel-CRIPT/Python-SDK/wiki/Manually-Deploy-to-Pypi) + +## Submitting an Issue + +Before you submit an issue, please search the issue tracker, maybe an issue for your problem already exists, and the +discussion might inform you of workarounds readily available. 
+ +We want to fix all the issues as soon as possible, but before fixing a bug, we need to reproduce and confirm it. In +order to reproduce bugs, we will systematically ask you to provide a minimal reproduction scenario using the custom +issue template. Please stick to the issue template. + +Unfortunately, we are not able to investigate/fix bugs without a minimal reproduction scenario, so if we don't hear +back from you, we may close the issue. + +## Submitting PR + +Search GitHub for an open or closed PR that relates to your submission. You +don't want to duplicate effort. If you do not find a related issue or PR, +go ahead. + +## PR Guidelines + +When submitting a pull request, please make sure to: + +- Clearly describe the purpose of your PR. +- Include any relevant information or context that helps us understand your changes. +- Make sure your changes adhere to our coding style and guidelines. +- Test your changes thoroughly and provide any necessary documentation or test cases. +- Ensure your PR does not include any unrelated or unnecessary changes. +- All CI must pass before a PR can be approved and merged into the code base. + +## Repository Wiki + +For more in-depth information about our project, development setup, coding conventions, and specific areas where you can +contribute, +please refer to our [wiki section](https://github.com/C-Accel-CRIPT/Python-SDK/wiki). +It contains valuable resources and documentation to help you understand our project better. + +We encourage you to explore the wiki before making contributions. It will provide you with the necessary background +knowledge and help you find areas where your expertise can make a difference. + +## Communication + +If you have any questions, concerns, or need clarification on anything related to the project or your contributions, +feel free to reach out to us. +You can use the [GitHub issue tracker](https://github.com/C-Accel-CRIPT/Python-SDK/issues) or +the [Discussion channels](https://github.com/C-Accel-CRIPT/Python-SDK/discussions). + +## Code of Conduct + +We expect all contributors to adhere to our +[code of conduct](https://github.com/C-Accel-CRIPT/Python-SDK/blob/develop/CODE_OF_CONDUCT.md), +which ensures a safe and inclusive environment for everyone. +Please review our [code of conduct](https://github.com/C-Accel-CRIPT/Python-SDK/blob/develop/CODE_OF_CONDUCT.md) +before making contributions. + +Thank you for considering contributing to our project! We appreciate your time and effort in making it better.
diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md new file mode 100644 index 000000000..d4034b833 --- /dev/null +++ b/CONTRIBUTORS.md @@ -0,0 +1,7 @@ +# CRIPT DEVELOPMENT TEAM + +- [Navid Hariri](https://github.com/nh916) +- [Ludwig Schneider](https://github.com/InnocentBug/) +- [Dylan Walsh](https://github.com/dylanwal/) +- [Brilant Kasami](https://github.com/brili) +- [Fatjon Ismailaj](https://github.com/fatjon95) diff --git a/CRIPT_full_logo_colored_transparent.png b/CRIPT_full_logo_colored_transparent.png new file mode 100644 index 0000000000000000000000000000000000000000..942727248ce04b4de33bf3ba68bc405c6fed2c9c GIT binary patch literal 30023 zcmb@tg(~IuZZ?$dwdjwE=(>3IKRK#CRa3 zTFEyN{O^{NqTyQr;A6e{hvShapgfH+)_&ESQ%a3lEl9^T+1(&R7M;3wYfBii^uqQy_V z$zP%w{QDr)>@C{xQMo-xw#8qv$ycn=PogPMwmC??1^ffIPpEpDpw1#RnhiW$mC^ml7Z4Ojwj56p1Z^3~mqn03zhEUzc1gr0XDh&bJwSijo zVTMf~^&3N8Gz4hY25QuQP_GS8tGi*PROhEugEnstHLCmgsv+cMU8rFL*fZFmF3F)2 zg=h>%*8h)~;@BFG_zrUZAHz@Y+vDFiCcXKd?9=ked^^)r}^E`QvzDSZ}2ZW z{D!zPzcV-XXJTNrzrTN|YpJ(suBl|SI%lXXwZAZ~2ki5|{>Oyh?}3K%dZ(?f1K{Hm z5Rs6O-=U#_(lIbGKVW5JW9Q`Ng$ao~dHPgBLQ+OnUQtO&NmX54T|-MpM^8^*-{93N zBV$uDGX%oI($d=6#>Uq6jlF}Dv&&mIcXtmD5AXkdeE#?G4FEsEA>lDGadF8h`IS|* zU0uBcV>26P-t$`k;4-KrE2HZ%y)~!iz@SG>us9 zx482z;PwGCea{2<9-}z;&GDTBm(&nVeT$l-yXdrb-e;jUID;X*Wmgw4wR8j(Z2NP1 z$_)4iwNRKft$OR=0*XrTkp1K199dZ&zgqxvx*>qIlW64Fm+^C91f+#yVW@9uNvGY8 z{J21mJUm2;!tL|-)_)^a01z94G7oBYV2;fpRO{nknk4@QVd1D;_w1=Uc$$2%|S4WKi0FOZ}ktkH%%{(~QzyXjCpq+&` zP1~sea2K3lMLSi&1Bgx=6Nc1}3V$O5nD3;+jH*ZTg$VuK4D16dRqUS^)8YWR*@7WA z*O$Ky0l-iZvv-y!+-DCA;r2p8RP1w({4VJL>AO29^NLYnlizqiPlqi%S>kldcP#)= zs>}Cp*Pcn*Fl{9TmuV!Yb)rSg@9s@bQJ^K$Te_D@k78RpzUWqt> zu9}sqj-<0$i4Y<14kNU>=b^K_Qv&k+G?;9;wXG1=IGUM30BG{}Cd=Dhdbpa})sq7$ zkDwzVt4-h2Hy2t-fUK9ML@VwUR1bGdZUI@ZO%`Jou*J^}myHTwrJJfkS6zs&x{rN99M#hPr#3i{|z>zFeUUv|TpL35__+ zOt`^KWM$vUJm@=f5N}%L#{sJUEC;#85g!d|6C!6+ zG1|`RUJEAi$w3D<&}(l@D|eH#V}o_mMs2I;sfN9#Nu(K#WlLs=yGtJ;Dn(1dzRc!qsM*NyIsno?eOfM{*n%E4y$ zSygyq1l6m-g12VkewTazb19#@umc;kuPf~Smg+`y$HXqs!{;jOzzXGGS^kz^HA*vV zdfG+=kZ-(BKbo%#24{ODT$EQsm3abWk=s-`U)@kFi^u^+dD>0XqkF&OD$UX+Mjj({=3S-duXah zh$LzoHS3~MN)>zHTS=d*R*VR=F;{W(IB}W*g6AI*9_4*c=@!P2H>#j&^!U8Uj7)m7 z!9bSIPz=#7T4cjp-Ao2po79@*+MwgyFagf$uYP96_u^v*RH!gObn)ZCPbdOxTBPq} z`XEc!W+oV|_s}{fI>Z=GHQS=?o2B9FeO;s~I8q+e!K~q{EH-F@8oVpM{LP@YU`!QeJG;XLET-~C23LUm# zMaT)5$=Wcus<>KhB|e}6fzeDaJCNEFbj2iGQ>(faZC@4x09{={PdsW)J9q!|JmN0Z zup^l^AkBJ09ahFUtDOtGFN}#j9tWR@T6D%5d9E%AM6}0-JIu~h%AAZWq_P|vX@n` zY`q2+M$Me}AwM++@c`>`A@x{rbb^@ow;O1bMqklAQo`ukJ4%7}21Hn(=NHB#ucYQ* zn%*GgP`30-Q~{p4nh&;egagy60 zEEXZg6*WkdArZmNonIw%qLmHir4s6%%U85f4lyr`uAIYtPkhhTxL9COKe|#x)1oH? zw){XlZIbZ^W30R7PUi0VE!b>O2`mgC-MMNCsGX@Y+0G(><9SX6GnQfsjx z@Wa*Y!M*KHENpi-U~Md4m=J{noT@aaVIi@wXW#RM0bucVFQomo4m|)kg(GMGScZqS z-H^TE!>aXbNgHnA#55UWQkF)1^|dTljcy?~41*pv4tvW|xD1H~WH`TNk9~HLw3XD( zo;nQx*5iLK$~iihabqh}>G>X2tbb_8@Fw0cVS(stnI@cLl33h~iM0NrF?_ znkaz;mW2KLvWlH;xBz2i%P7f@D5uyJ-Ch96rSIpKWo2gxz;TsW1JI`*AKbru+DH~ zF_>&NTVifSx0+wG6ZpBD5(PdhEIs+jPDUSxDo%F7{iiIOIz2BHCzw-G9))%%0HmGr z9m7B=1Dy5jY>PdSeJ1_w@f zt?%iUp$dF_(DkjwiLzI252mBWG9*{G7r0Y!NekmYpQU>7W(8Q|4vU5$(~5&ksT1!F zkU1(ayO{GoF@o+J`Kn=TypMmoF$H0p{7C$I3pXIc-ed{!pL&7S=IgxiOn^0ZZ zrbFY7|C{Eat91qsc!*SusvQRa`lSq9;q4X{vbcc|zHqWNhKc{@3B$mEz6Zpu`uidp-26b)PWCMK0yX zDt0T9t07`xE9}pXSA!mq5Va_|%@mghUIDk}A! 
z%6a?vkO^P_85Ulm-_&Ygl316Q?_07Xn8TfW(rgU+iH znc)eZv~yyb7{VQ{uuNwEo*2{}UjeuJBK!EG$E53$9rC#!=%1Jxeq;#5uwgZv)>zxj zwp?^(Dp+$?p))>@>SS|2@VnSNW5L;f0QIwbrovfgW;8uI-qG0zf1CN1De{iZVpN(b zv%d$(*iC>BloGpSJQY7+@B5<{6aAh4Hc(dez)U7*Jg`ziY(9a0ZeM<(2U^8>%K6hv z0;i8QwvbKsxA*67!-Db^M3N=*3_UUCR%+_^>>Lhhd=Pb9xr_<;vZ2U+VJv}tEd~GZCfFjIm zQ4H2#kPm)AaV^d&(7C9IP5{1sfb>sD1f=hSut>GNgwbR90SF0;nGylpH0EZIZ(xzu zp1W;^Ct)$SDei(MX0`xFOu=i82e4F^EJLC0OfW-zVxllY>bubbv9K>QFXXQ5Y&RoC z{>k$So&@{<$Lcru8g|Bqh!UQVN;FDKE1%BLtLa=l=|SDrzUQJsvP&hJ21JA|pHov!|ETi) zwW>Pm>toOFZV&+qDAnix4O=o33`SQ}Hu z%JfV3XZWX322cbVH=V55UDS^z!&w3EjR}jWDr`4CR?(@03~Z!+k15Q}frln7)otN0 zJM`-S=FHH}8!smpCS{yodVfI{nFj**8eI8aXWf+&u6AXj_VN5~+4u};zzuwcMN_Py zw7TqBleVUo%8Vd$o9Bz@6DkaJ9gG0_Ty+~@OJK zwe%rz;=beQW3!8M$#zjovX9GmRnQ+!!6h!Im~*R7hu3k^Fiyd4{Z#fNL=6Ebg74n< zF3eU!eEg`F{9#EF%iaDQbk~@a3Yt7$vO0exDtI=UUf3(&f?W8!MO1#%3$H7dB*<-7 zo?P6wM#|{sHSudT$)2myr+Z>#O>%JgIP6B_%U|GD-vjF6-SxKGLhj#jO(*jC$YiAx ziJjcbMr>JpEPcN@j8ywMT>kBDlwI~ibO8vM+wsD!>bhu?=3jXU)nY24bSaqM4bfL8 zMoBMPnzuj48{iOsMl>bs<^ zTxmnb7J@{rA;p_Im-!C&qZat@+7Jt!_H;$ilKNTiqX}fAkSNS#lF@NntVz?*kDo~~ zga*gYH@oY(!yI{-ec2E~>xvP7=1=U}L=kOuk7<))EL&v3S+;gdb{!5s5rD`Hq2Qpo zsuT}N!wCDlj%){i^0ZX?PlDN%=5)QEgeX(STcuuQyAJy5LilvvOM;uJV%`=w%+MU? zV46zTh(5-;{JGi#!7xG>fwxTuRa{AT7Yw9l!9+@mdy+)H2EL<__#^sGwTHixCUoxt zO5C^?kHP(7h0?1=L*Tft>+rhCx72b)g2jaVMJya=4}5C7w%WbY!Zv&s1u|tg<4vOV zg1-I6Hs>nc5jc3T0?qE>Vw;7|en~Q;qs8R4;l|uo+(<48Vg7Ewf?>z>VD~rB?{*s( zYC~RK%zLhD)L(gf>%8D&bevMuN;@Gc=i|fu5UMa2@kt zvn@=b(wd4pR1b*xN~QB@uDJW1k@%k9O7II%FFYI^s@uL@r8J}%$fUb?1KQYT4F zBR#^On#8)1a3mmAry|E+bMko!{c#x-e(4#~Wjy_c%pROzHUf2?7$*TfbRA6xkTkJ@ zIim7GBIpU%+a`;>ve%Xc|7kN%hw=H57D+Zgww<8O#6&F-TmT~?c?i{MGQ>2)JSOn0 zobPZ~Jt2OXJqCHn29nbaqWc(2uJ}*i51}S-guwSd?-7(--B!DTvX6JJAW<~xclu?u zL36)OlElAfMNtlET?};S>?9B&r?kq~pQ?tnhG@ev@38er_@yzZqPE_bB`@adwuZTH zh&})sq_vwux#Q!Sx3lzHw1ti-8RQV4@bC3SpvAFNAXkOqWa9rw1vLmccmd(Af4S$* z3Enq3cs@lH>YgIl<|26e&6~#Msc>etnROIT)ssQk} z1TXXAwr23g-_Z!Y)6lng&BUK+~SiECh*oHQ3N+n1tud|XqWrvQ$7aPhGxxg74wub$y{3-1U z05-%w>qiedEGQSfCe?8!)nug95Ue|Fi2NvAKvOW^_nglQ0BU1VHiM!F&nHuFr*a;K z;-q`fOAE3gY+b(6^!;FPgf_kTkyp>zu#eFRa8tzSuFCOAc!A1I%_UB;An2RkYv3noj|Ga3;h2V5`{zmkh{0_6J3OE4hsK;AT=+h8X5dj7O9ecreZbiO74p!9$0j!IE zCd=D*8$E&C`mc?u?Ct#GT%S1rd`mzPjqur|Ks_evuIGy?`?bo2fon;5_S5yA?8 zng9`!e)LlH4f|f^j%K-(jvt{$nwyNxW=pX9(L_;&q&;i^(0qeV z>7Drq_Y3*5e$FSk#wi4p9V4W5no0!lTOZ)#Eeb5oes z`%Q@2#$sS`Ga*s(_M8J04`_?~Yq2HB1OO5A*is{=bL$!~>EY4s8J=d*6qfhcHILY- zr`G?Y!C~AlM5y@mlfVgEsF)wo))AIV^0r|ALF!0a^ctB-j>YU?CZn{zb}LeK)2nHf ziR$kw2Xta(3=C+q^7vS*O^%-7!G=VN;+if$1(H`QG4w^#r33KDySt+PL`}zYmK<$& z$6q?U%H>74gJKEgCWxU^c@X}W4p=nDaY=^8kUK; z#J?Td%a}#FCXw+J()Cu@BG8oDxy~vUO%kYq5B?Knco`O6%Fjs0GcbNpmZIK73`^&F zcGxKESu{Bns``5Y$L1Bxeyx1T+zaA0jT5{af}2`20*4M%w=&!p-1Nj99Vk`_mbZBQH$g2bj}4$nFNjPY!`3+nf~g z)p`wiZPWY{6*@W5#pu??U8}yZKb@k0$&mP#op>ty@Xu`o&zD`eWM^F+;ZWGjc4*#( zff~7zV?mJ_Q4yNm{^xY-B0Uzqpz^o7Q!i{)WF05gZ(yrFR8%g7pNpNuw z!xd6H?wvJakNNgF@zm}i4GPQGciO{q{GM9$U@u)N=hM!Eve!hl&^V0qIkiOB{*NCX zZ=qCQCpWbgr5SC*bQ2(GgGM zMEmKPNjSSq`9dl8b;7EOkDIbcU!V-00s4)6X1Qf5REskM<61Cl3P{ z8D?|$+=X`X{HHM4iK4Z4TSt{X%)WzWrgSBuJ_wSdF7KxgHVcGwX)FyaNM+<6mOtNJ zwxGG~?BgejLZb{FdM6J#C8KIeDl-Kvc7#4si=f93Poq*bDVVM2D=H1yCa56H|QOEH^GS_MjCEz7W0 z#43}aJwG3 zXZMKOYIiYA=mjY4OSe1Dw09l}SEg&`Hzn@L$W1!VzRSea|CpQ-OJ0#|>3A)%H6Rj4 zh>(h;Yjn1&NQOxrw`&mz@z7A7cEbNh0Woyl14(w?fy}L2U ztpeHawcz``gvTuj)^swtVA$l_XF!8DUg5F1%p`JHJh}FCtFGX4f#}XRlm#ry+{5{f zdx6j7a1X={ngdcRP{pBiW=!7*PMk~lT3mNb?_K_bcKeJvTUnHZ0pLM}xf>#)>cAW|_@nNuRo9=Yz8t;!#Ft26 zT6Xi_bUud`E6NKvsjd|5-d*(1zU0Nu{}BCL_m%uMhNqX0fT5;-izPyg2m45l0j~qH zNc^?#8h#}hS*izgB-4Jg*|bq0`x-c)DS 
zqCGLj+|lCE19o6*)S#nNzoC8t&H};1=}4~Ufj-ahbPk7)WBk3%*;OzVeM1(s1sn)_ zG6R?;g$gFRBqA=}S$6+3^=*u*RWAhY>YM_PRv)8i3hRLy%K zHB&}zvt6HlOin_;?q!d$@b4w)-g znO|z*-WGN_xY2wJ-PP35_Hhs9goDu}iwQ3kes}Pd<@uw`K`+jO?depT_fvF0euX%9 zz!NcThYx~vi0mGsSR5f%JO~>ygIu#m=44@=q83mUgrYiaRjNQpzj;~}I238wo-7dr z%ca2PTJWG*N?$VZK&s*dLT1g=s=?-!+=wX^$nygM^^?4)*7&EV+W|=1Qxf*jQ*SaP z1-8k8A-x z@L{`QCKqp$Aw9n!(OoYP^iRYLc5gZP)oZ5?3qI{hJ$xoV~| z!|2%HjPh>22LnPU=ex--hyafzRG8BY!zbrx*fbCKjl{k!(VHyrr!eHTB*8-@*QyK~ z)0BfAvb?uAIRrUvt4rn#g7r`3ZB#|*n_&DXB2~6leM#D8nAOeVaPxPx|4w%m^J70k zq}&~hY=P!|;g1L?)~*ltPg0xJ>s6}eC-2oUZPt5PbsO;#eE86X#Ow+_O|_*N52TARR=TyUBZI+RoDZP99bIeBrC5C&U-vgALgdSR_Y)?zPRrm*0QlY_|e=~_t(rwAKn6^qrJQ) z8ed&!RvvSm02`OmU+=>C#3A~LsM>qzOh1}7;OtkQV{eyJ~u8+$`xR+#SGC+F|gk_21ww0)eXG`B)AvO(C_8%3(Hz2A+>ar-+!aAArL@1vbaPTf+^g%XBb(;35yGYlo3FFze zuO(|J{>T8Cyctk^7sj78>EAa_HTrnoDAdUBO|b3XaIh{J(-?~WDZK14^!azgrUZQ~(>(szt3B6= zOX@9I{gFzV(fkyv;&J5GQ1awY4{lb>f)U}^NWo&wyQN$RjrH%Ix@qJEy-B*-)6A}) z@JOaBy}3HKndY~a8h@4$#uiqFn&s~qtKP>|KF4D255Rq6oEuKyg{ds!ZrR2AdgMGs zOUNyC?6?F&*ZG019W~`}9iK5zc^)hGq~3=k;Hn!tvY)Ii4^=vldL;zL!+fd&inJ!~TSGY=KO54HSK!4o_ef zX_uC7?0y1Y`_UxmOFe3H_wct4Fk1KpE;rDd>?YA`z;?K|ETftC#)kwLXN6w>@YGg` zwA>eS`Fko}ILx#5s{k;|#>7>6&AdDkz`C#6*-@7(&-j#`;QG&9wYlAYT=WDwQW$5$ z!}8Y8`>t+J9tObsHTD?74!!^eqEaU>V7KFvRWfn?&%gqdcQh(}!~4MlOk=|!Tk=?# z+VoEYlSuVUf(;!&;1p?xUoN*^nKCOFuAfSvQ6otpZTzQUpzZ*v8pzlWe-T&jc&K1e z=JT7tOfMEj{!biX@hMb(|I4yA0(b$|q@?B8l&~svugmXqeXY11L^8UA!fiA72AYIh?E01#M>tz z=evorcon$4$u$fEpDAsq%PHav+G4E%Nj5MA5DLBQ5wAGQ!Tm=KlstpyKYTP&7dF5I z>VKOBZ2li)Mu(K>BTnEH%oK?YezvNY@}-Mc12N1~!U!Rya;!eNW1<2ArdILVz$vGl z8DB+vdh9cEB(aDCiC~@q)mVkH?-@p@uSeN1F z5*4CBgJ)En3srTGZ|owHXKKp;WH7KaQ+mkw2dsQ8Eo9@m7q?Al zfo}t!U}A?#B}+%e&7ZmM{bw*?b7D&W042;;ze&mJs??7!^ig1lz2WvI)(|4JPDQHj zw|MPu>uF7SfVTTb!qh;kM2;eq5_R@;MR>YQX_{ zkVJ>~O%-lwbr4ngVZ!vPwnD4vZH~t#$K}u@h$L?uYAXG=yw*W?Qn3A1o!=LSrpONd z$6!QRe?s=}ti;LKUlD`cwNLXfAg*9{!w#W;Ft6LGo`<$C$2sQP4dy)bz!orLYZ-cQ z=&^8H*e`d0=?%xZHUbPlHzf?u(!y}u@2bhkK7N&%3unfX+&z_>P?>%RtqQs}l9O`T zwXxgkrd^2F6UmsmsQmIjzj=An+_#}J|0Ha{xP{A>njgJfTC&F?4sw+TfqKCx9>Lhe zX@A)LVSuTa(~FoNu#Q9aN3PCy1)hEyV1jhuWr@VX8e!pAo;c`1Cmf1XoB2ZpnEL>y~GIGaT9)Uvq}79AHv$0qx%$i<*kr zJXK_I#k!|TwbY*F$W$39{ud+Lr|WwCTF>TA55Sd`7hJHjtTG9CUJ31(${Tt6KvTV_ ztn_0lD}Ar|u71wX<N%;tWG{%3$z6JKx)1-T%1oZXZIGBcR+!&1R5hmQ zipLm)n-NGVPAn36i#pi=lD+(CFYB&KOYC66@!a|~y-b>a)wk*sL%q!!y>z#eMgZ_M zw?v_7&^jgbncJKn%v%M8CUv?EsW&oE#m z7>SBytWJvwK^D)?`usj05zq|1D;&x&~sB38;X%ik&5rAkcyiAa4N^q{x1iE3#@=g zNeL+Wmk9~32y(aKi#rjovdJPrBc_7jpSQ@D_6#=cf2#`AY0A)iMjpBM5iGR>k11Zf zHI^;y?{C<5wdxySa%PW5UJ`2nz%_Ub*+nMAz-D*#@g>efrlWSyRlpf|y-?G^|9MKM z@15$0`z9QH|NaN@4KLUEA%~#Pt|ip(lV)CuIG}=m_3!Fv-X#ZiVo;S|`L!DFbQF&h zJ#=c!yRnW_5V*-Xfd^;_bAzAV*n&>1cH`S}uOT^KUT7Xv;+{W8fJnDVfP6*lWVe+? 
zPd&a+51uCwU3jpRU_hNgu>Ie%Axg4y$dhfZAdx$txv1B5%pZ!|k^yh9wCS>NOysPr zeTVX)LizMs)Q5_@=jYTnjf0>xk-g%!c!olL;7sg{j|qViFbs^LLZ96@85q)EI$mY1 zAjprI9<$To`CG9eMxDk^HJ0oTt1>1*Ktwv|`k9t)0gI4c$QwGjZ3F9E+bf7(g~Mzz zadDMjP&bH`hddmUl7FIg%WnF@xez};y%=&tsw`>ydzDojR14f7TiDg?n$E3DZbuFt z$G5pkZF8FBt{|vtqW16srwAmf8ojP1MoW=WplqI2bC&a{WZ?7;<2g03NZXstuQVZ| zco!`p?6>!NMjmv2Ef8d1@Lc`c+DPt@h`(lR(x~r-wuOp8?F)(wi&`9ZtD&Y1nrMYz zUl2!u!27OVkwzlPD2Gpr&Yp9s#tQkNf(IrgI}qUF)dW@%C;)eClw)vdLQU8i=x1!MM0d+?pJDY`pqmOF-o5y3_azVFP}dr%ua=7?(-FEMFsvPB9 z&6m-gul;0gw;W3UND2B5m7ltdh7`m-jv8RX5`c3@Xx^ELqC4cq^QS7G`6uU=H1t>l z^Y${FUts`pOo|v_V)~mKz7}OLOLET)^e4@`y{AxBf1b)3hl`dDaHTh+-qits6ZcPS z*fQS^{GP-&Uf|oug^@EmKqUNV(QpyB};&@(N`YZYmUNw68Y zYmBZOS)&91+b}a#IA7ktv#w`CeqhNgUVAdq=!IA7F;Teu5ucARNYfNNw-2Z+uI2pV zlhVMQh~_?6Y7W@?*wv*A04o+}ZKBWZ`)w0Q7Vn7^VI0Pu8C?`NdAJZ&`5?Wf*-=#@5)F6!2#d;CFi}IPUvO zZ!_En8_sYgN&1yl_{wY!0xX*BzqIE~_}!EI$(hqWk&DK8;$k@W3=deGZAqpT6tr}) z8($9z=>M2P4sJIZ=UlWAZ_}Z1D1Gwo?^u$wLHqROIaPeXU-0}T%e^?%=9|{i?LW3S zE>)?n*z&Yx9KhDfROMTBfKedK3$tm>uQsq(;o$w3{zj2$bDv}+850JqxU|4dz6Pc7 zpY_;iJ0DCN4>oW82xkqF-;4tQY56{CzF1h(2D;_b4~*j=1pxpYv0(swN^$^1f|q7yNgN>?cw0Ps*k@3yA(qde;d^5!27 zfG3Ni8a}Q4C$XqEm;3k;vu+NmrMlq0wsQ;YJAibEb@q`;Gy4=)zeYKZ#sQpCf6TIB z-}_0ozh1xe1buh-9~&srt~rT*Zykn7qhi(!X*ZXA<7f<$s!Lo@ouKuJZ-}z9Kvx7P z(Aa{fS9R$ilcr|vTGX01GdR?)acsChIcdhIYWza$1~C7|%NH8G$vz|tA(tJa%5phJ zGGN*Fz}ejOWMd$8{UA44r>Pw58IszC$RvocIh5kp16(NgZSH4@U;W7%zsM=H+S07#{}$E zp0!LEWJ_S#iq8%;CkIREnXWwV09jz-wenM?PxwDK`#Fy{pfA`qsc2Q8YR|6}*u3f- zwho8@T?2TTKE_Z9`;W#-%w#%bDwk*jO-;?d9Rw1f9A+vbEWbRN&}l zJ9ipUUP_NQ^a3rwXNY-aGu!!og3piP>aY)xRv^g8e|gCy3zJUA*a) zBH#cQRNqwb+D34lt_7rtsypgj zpjN9=P4eoIwXBU;Heg7)Qh?7GGxbt}X-8-nW(}94%jL_iRJy9ie!dv)(OVTqql+ zguA?o4T?69_1^KS^*sDFIMS_h;qn$G=tj?kN7%$^4{s)bqr{2!gcD&uJD`+-=B!<^ z0J2Jc8P67>4qMP&&P);f*A1UsMlha0R-vb@yK^~Q{lzCJ!^!@?jYR<+hw=L$S54hZ zzMU7C-ton{xYdJw@^bp9DnMH1{?|Hl=p#cpu=9Z>zaMI96Oewa_O;Fmy8fJ@7|W=> z<6E8XqCrf1O=3oz6OA zD1p zxxBUA<4U(c08Cn0VuTL%eDjS`MZo>Z|1uwCv)Y^f<_*@lfdZ)b84nu}Pgkp_Kw!;D zfXTPVD4Vse-#^Rgy(-CpX&ubFD#@qU*TToI=>fCUfJDYi79GjuE@7apKRLtXzN5VF z%KvU!c6^VP4no0l12aq2H=A8nMk0*YApetol+D)LQu?Uv2=Hf_8qksHbx`z3JYY*| zhWeYEi#UD#m&#j!rawHtzH4e`sfP%h2RD|1!Scr{`r4Zw$O0_Lda?X)Pf(r)9ZWQv zhLsMVRa!Fzp!oqF!fkv7sgeWy?_PW$(r>(ALE&VDFA#Y+-q_#74$r^FPNz+S&rkFK z2T+^J_vbdcf`C6W{30M2a_zcjCV0d2sWxU$hjQ2Iha-G232f8EIGwp-|HJB8Jk03d zLAR(e5y0^r+WE8LJZd1`R7Cg@4)8}4vqwrnacFwHGxXvE$4A@^xr+-z6PEbkXBG4> z+n8??(`Au8eUBgf0G}UDQuNR-w?;QO&hk!R5f0F=f+J1Ga4K#tbQy%54zS9xqoe~xbdH{i70_Jy|{9^k*y zx+|ROzz6`z4QCCTcmROB1O96E|J^5JYg)!uM~j+pv2~^TzG(B19ZR%KqGnH<#0E)_ zHhz~e2ZifOzXS&{#r>x-cJ0sdpXo8)*;HmR9{!3yc zW1f=APs{NdUEWJRu{ktN-E>^&WJw-~8%%09^|IgpLbBCMUnaEWs#d6>WCfj!>BGk7 z9&TLNi{d|YE#fuN`yk=jnGiBpF31@5HfMa0X(L<1G%dauXp^*w9bnW@pDw#L9gO=n z!Q@Lc-v1R{KGxzfM)8i$>8a9FQl;=vfjGQSyF^K3Jl+H{a6c|5*A5bGiaf&VF#qmX zsrlXV!iLuL;is46HL$^|s@%60?SVuBsY$O^+J-ZONg7_w9sOm_VW_jjvitQ~KK!Kq zYBxx1hcR8P>ffp0=(U8RK z*9YrQ##71VBYT3=9%a84Q{QUb&mVcTZFiAcR`lh!i+j1%#HHugvGdKce`C9*dPllM z`0}sl`G2w?{EI$!R@sv=;E#@P8}gm-K1ABR`(p5L=AJ6FV`ZMTr>6&^8G?fm+oF0P zeftQCB;&c4RMBBT)f>ASd>(%%AgITULxd(nWh%z>4MEt;8rQ@aSRq*;CmQwg^k{!e zugELJU{`E`?axBbmeu8h+fBc0Suz(wQ0NYpfbUNwncWv>RN8cpU%Zybn89Z+*$Jjj zU7v8HckgwZVLIF5rhfkW?C&5u)cd=1t2>_lBg0>ziK$JE&Fp5jufIfD5dzQ-E7f{@ z`obm(l8W=V=YcKA*-9p42@~-g66%AEQNT<;8>EKA;e?cBC~gXiya7U%P}g=L_&BN8 z&;T4R%%D*gdiEC+koMrFQfI{(z^f6d`D9$=|g!diCma`1;^Uo>wcjLr2z4zDeb4+FA1*1EkdV~og5Q7C1 zq@)`bxXr3op?^#6;3``iUpfdP)E0l#?L2>0M@w&(qUPbZ<)8a&vd>p8L1@!we|^-I z<80tr#r?Z^TeF{0s1J)pr9Fz#IvYzLw#8r{c!KjoFCF1;HrU0iq*(uEIQKpZx;-&C zAapz5=iT@nVi|Blu%;G>ZgN{L_xnrnY={PlM3Uj+z_r)L{W70knvxRBm@3H2W3phm 
z;uuqWK{XCh7j@!ySW0WCn^cv%nV-_(7Pq#|7Aq?&nD6o>sbQb}ch1Dm929{7Dl1)x0el$2O@6_HIvc z4w(dhtuK?9he%Bi37)TcB7|SxI%ZE~p;i5LGCph|YM=gn7a>HOGD*Td(5~Uy%Jq(VvVSBg?y`7HS!HsbVnR}vx+HIl8<8}GHpmD~ z{PeWpmbl!=se%_k4B!Ln$E3UnDX3dfhdzTp0K0X95l3>RUjF10BPCCzr+tLmiEe)W zW5mvLGR`NDWeXbrIQKFQ}U*MyNIr< zYyxf)hDXW2Jge>{Fgms6v?bbQ_bEG!T_>)aDH$n5@Y^v;)u@v0R$Vt~~9ii3_Pq=$^(oU5r{s`DAD& zS-HI9P^@9>#A5*R?!-ztS+sFewHax>LhL&;;othOs7q@5C#Np(bW85EgRSt@tW=0Y ze&C+Ps*v`$O0h5PJcO=;i}VT`m^Y@obZcc2*IxbD!xl&q zDXA-60iUi^=hW8t%R9d_*^3AV1m0%}5;%id2?wAU1hA>amy6hD#B{sqCAHbyBeZ&* z9R-WWkNms}k&SzYnfA}o^*N~jF99+B&bDKmsCrb%qY%YIi{l{m6Ab>q0`VLzzOY#* zX3MNmq}@BOZ1Ph1=h7S@?FLxP8VugLqcK#>4Y#n{*s<4&!dC%i;S_W{@H-O#^T81? zI+y2=bfOI)dRCa;3NY7Zpr$xZlC*qF@mUhgAM^=~aJc=f#|UcVR17lMX}nNjnY^BF z!hkVBL2!bykmE5m7!)^+OBYT9f7Lcv1KYn^_()=@(cEmr$`i6@5R|T3$x$c|#PNV( z09!_3@qw%MMZN_DavEzNMK1&#aL=e!bsG#R3sew9?OYT55b1J+1C|R-EmaL}JAMOG)kXJx2uj$J@fVjusn zgOCdihsfYng?+yIfvEK)9 zYM(8}N)F}7-r*hgsYklsAjC|Kspkpq#3(ebIz(WWVt9a9#t`87(&{42 zG+cBL;)9#Bv*@8%8zF)k8l9}?(T~o2ryh_jO_L?8GS&MK@@t;46zd3hHNQuazjckN zTs+&|f(bn=5B2iFey8&=ClW@tPlUiV- zlLdh&fjkE=Vj5EalSDo3Eql@X$@i?^t5Ct%w|d;v>WWSdw6$DC6iQ`fnH&v-1Ob2#{3%R3GRg2RfhoPy%-*VISk3G4{)34!x;exA+DU9Ju+|39cqmp zsGtvHys#@lBl%U`I;yL57&4zkDns~N*Hg+H`*oxS<@vciP_BxkxFNK;mGKawg=G1j zeO_cJsCnjyVP6nT_cPaW1!=Oa&v&w`914^QDL_eyr+=y`MinXG)m?%Ca%y=*V3J@> z`i@rM!axz0N(P;fjZPYo&W!M&74o=^3drZ@#lya%1@yt=1rd{q6Wz8lnS?WiG+2Oy zWk~XC7OGel5zfgBn}plu;({5{CuO%}TEL~UXP9Niv(rgJs@S*Mvq)?f)vZClAU}?9 zV4oVfl3VLMF~yxwxT=$w2LMCBW^^OYpdU?$|GzjpJJ$ju#bqOun}Qkk!5-*9Ct+xG z>reNcJk_D5-mxD?V|-Ic6AsQY>@u2!Q$h;-1SY&%&{vZq(cO0r-S`ncn<{QVg`>BB z8tJrLSGF96in>GbVny1a;;vd<_Kb1K;QSr&-4dVnQ6ck{ls<|=wB>>Rck5fQ`1X6S^>HUqy z6Mm}sb&8y1RViRUA7ZF%h#2P5IF7ld%0aTP(33_ep0^eR4pikM40aa%qE_-7)qPCsWJUf zQOJ>@O3x?f0S#aXl2(=xi!vbs!PW9}?tKDLNai zWf}=_LMXDU2^UF|w~jk9^+*xJJT>Ml1PjrW@O=Bi*t}d>l8ijXtq3{b#jlPkVD7P_ zIRLX8Ud|+3O-LuL2oB>RL|gI>8y6a^!Oy#96{o?t*(BBu4oOLt#%C3q$wqT>?B89W zzyqp23^`k7+K)MfqkFMrT))d!*FlKa-6i%~>sh_evzEv;lYMGTKU7STN#bA(56Jg; zz%X_bz=#)O8V+Qx1HeNFL!x?2n&W&GjF0I6T8fQAK&H#kGK9mUrr#+S4TkUMTSy66 zZ)ts7zwN?!I)yx=5*Lv)1sISv>n`B!MF5ZNz?ZtUF?AFnBwG>E4mb-Inh3$hU}3IF zGlXZhLJ2Q$N}VG)Dze3^b~Qg*{ABS1VC#%v69|8b=!KjmEfT1UkTDwR4LxwU%#tVpx96|`1XzUw>{_^7u zXmJX@LXwlvI6t=c@>55U5(S^2e=z9l{f$p8EfD60PvK}LAs^w_K(fdV5EwS=E;#@8 zF#uq35l+ChF$u|K0_yH?@Jf>pRSzBCNc)5{`Tl)`X#)*bOpec{KS@RTpe>5jKH#>J}S&bDgg?v zJ}D#DX*ypv&S|9Wm-Be#BLKj~i}+-C7g(&=7?dxXvj(^>{`Z>zfQ1D(!A?W6C zcNSi4cRphldB*x@9*lMhBRMh(r&P%$m?yX>WG)BadjkL-`SQ;_27nUvfMEo&w$BZf z4X$5VxPTRIU%rgrdvGkgf^X0#{pk<@(fKApN$zt{@nN{^Ll6DF>Bde8of{v6^0`O} z+VAi;7*@{KCfJY4svXq;Mm}eCzNR(@#!t1!@|5$IezWAl)w8(6^LsCFX-o%abi41u zodMIHM@5i!)xT(Au3S|o&Y$0qLY_KJ@>%=MCJK|J4^9<7R2Y8;clbWQr7=qxXf{>L zMYH`f`l9o)zzRbAfFQ*@(<;L+eZhe`!>+lbnT1QPPv-1DCDp~wOINYN)vN#c@Dq3x znsuf&`hGpw*EG7K`^x{Z5=c7Ood;MkzGloFU2^{A8dkV=?ZPN9CU;cJ=Z5&hAgTyV zNSv(|g-N6w^?w75v4r~Yz1J>bg&Q|6;fXC@1jAvlA9FP@ZofmP37x%rsGn6b0@9PNnczf0N z3^P`lO{=mu=hb#<+>QB*>)^3N{;5C~_S&gba!KY1d2jVH{`q~Z@b=rA4>*q{>;c0F za~m<3q;P=zHo*MjV;GA(|6lRJ&M;ygbwN^mG+gGWZ+~NAyXDK0?WKSl{+3Xq!Tyr; zlLMtcFvV03GiDC_GQfy$>0ga^&H~KY?$X!~jqv7CFf+1Glt8*w|8X00_1RpLg+KrV zoX#w!uzM3$k=_{+$9c4xOJ&m5?KuLoQW!Je!LAgleYvTN@BIfD@pr$wxFhNg^>Fv< zIP&>DpMtrkj^>6}VdufZ>`waZ^42>qgSq-Qzl}$AI=KmwzaDIUU(v6n@=B3B+#k&eg zPM?LNrwNRBPCq(4*=jbWp`krIdFRVID=vg4@6Ks}xd`l%ssxKrktQV{V4N2L<_wp{ z(#XiuP7%xisu`--f+-8v7J@{kbwQ~{6qr<;r69cq{n{_2^Kt>&>g zF!$KeI#nABFH6Et>vY0}G4t{}Z|QV0k3Ks)TS)!NlLv${4Zs{rZ6)-esxW5Gy}M*H z_~It37)Jw<_6E9a5_7Ui>_NipoRIg;)Fsr1ox+&;;SVm|RY*E3RB?~lqKe6kdKcoT zBOoJAHtt+@5n{96Hx^GL#6{<$=zSE+sf7yj%Wbc}h!AHS?XSFC0wdg`05iWV%KZ3d`e2J(M 
zYK*c)U9BRqG!)6UoJ2g@BZ;KMgeU5Bq2MIEeLM4Z_jc!gkoWmhhZT2c=DqiwH#2V{ z#EBztar<>t^Lpju2o})kwSdH5pc+|>Yv5q^??AZfc99qYgS=F7-7P|p6IDM?NM=bi z%#-x|0dbNKAsT$hC3?=xUoP3aF;$EfGk(5%7f9huUC_G*!oQynAnCpX4<{u)MM{aJ ziQyVkXr`pf?wC|UHN~=Y?Sa>Q*Z+WqovNo^Mhwst5N=xE2jO23MwiKnF)%dD(CEYC z{)FC-2%WtNzbXAT~WHOaW!~^}%d20fM3HqTNvYf{nRA7l-#E-Px zL!v^%Nz8m6WZg_Hp)mJPAYAxlp_X}q1O9A&S>u!W&N;47sZ}HHRjD7cNmt7Fp2v$% zGe@>*xE_a5v~ThgPk?%tWGcvfN0V7f zx8t>dgiW1k7??htxd;yvATP*Z6|jb5dqR{$erDjF^xA$=U)Zq%W7sFCo-_297bO=T zGbW1}Z+qD#jA{Gsb)}5|8-zQWWOoWx+=7P^q?B`WlW%-$bab?2Vz|bn)+jFuRa@~n zK-wO+uY%%CQLz-A=iHw3@8RL?>EM;wcqd?#ucV2C(g=*p@4&;q&AAVo*K!YD znH^5ZgJF!d6?+_Olq_g@*_AT>E_WA;c#fY#7hwwC%e^}V!je^em|vSRE`xBvW$bD0 zV@dS15ghpqD##5eIfHS6N|nkr5Uze4s|6$w4^-Iz=i%j@c_7$gt~^QgoSPH$X9n)c z71s8#0%Pzu50)&LAlLl$ZHTUjpSrG82jnebt*EDf4pfs_g7Mp#TO0hGyVl(k2Tl@N!ez zCV8^hO?Xtd<>`BGyz$2D+$y)^DnEmZNeRYMV(R?)JMeNVMCZTCEOMDaUh;mCLB3*< zJgcr>aT#8&ET~;3&p4am>tafwaMJ9QWXYr0o zU=RvH$DZYQH8tzZX~vJ!Oqpn*(R`fvsyk);7AXI5yEY4;hG!veW$a#5y9fnWXUV=i zEmn!0$XZnHF|bdbmaJ2JQ=%@|4hR<|Oh8vCnAVXOPzwjYrsS{wXxA>f@hPvLs49lf z^m!_; z-1u84`0o3tCK>d^NLe>|cUBo6Tjkj3DtVflGywh3*?)$D@6V7A<^4X5;AXK4AbeZG z6uS=oc3K;=-TQBpF6lq#G>y+=aT&e}S%J{w@B_+WmY+pR*Z;HdFgHykwT>|uM*AM` z!NdFSD#!Ri!7O@&*ybcp@|s!zd`ocwMMP9`=HtX8w34xh>or?ISz!U2K#0pjo)4f) zAN85X*ZOnDVfH1l;eu9UD8d~4_Z0eIhEsAzP%Zvv8w6$g@AfsYi08-jiBQy$=iMi2L7`(@-Aaq_ za994SQ|_MwO5CK8ZQ%ij&4=;yJCT%}`8Z+us^GIvJQ<4x#I1~b1^oHJGXY~@fj7be zH$(&&ieM?Ce()mHly@%KP)Z4PCl4Nhnm7m#X)c#Xg~KA0{NK}|T@oEn7|Ls>HY?C0-(=_A@-$NcmPs+p z!OJ{36x;ZeF@29EW|Wg}q~|RuVl*O3r+o_*nJua;z4RKm?db z{v#;)@jL>;8DWGWV)Ns}Vmi|xmftWEh>u07{mV8nCWFW&2O+r`ARi&b=G-gUWZ)*} zhwJn_>Oda8$aB!Zr<$+9%WWd0W9Ftt9DPKZ-+(wWY-kqqZ+vSmybmS+k7~5_1uN`9 z>Skj8Lnyf%u=6XZJGPl+i?jk&%`xrAUxAVj7ElW-QoER3Vbm?3Hk#xOcgF3Q!u0ab zNoO>SQ19_$RjX#=@@@qHOb3L{_{~6r6<*1ij}s)MUo&n23B(Nl=67eANWzW6<(NFz z-H-zOKFsL|{;lrA5P9wHsbt;K=Tc7O5w?od?LcdMLhKkgoEsU=AUN~yMH{Z>aS)JA zq*f}A=top&v&^nQ2@!_|#F1%3yW{6Dgsx3)-1Jk%{8rZtcaXZF(9XXFCGX865YwR* zNo)CDg0#k=Vc`QPS)9|jZ=P}OfvFTstw790mk&H;v;g}A%poI8|Kr(h+tn7If**KyZmzpU>@oQQ$zwI03z_^sFv== zvx9=!*u@Lc87X4Wsee-Q2B%4wM&CVu4j+;wRqoM>HD&D#hM4{_y%$}{^*eF-#O0ga zW~O2m*-pV&K&;9*qwWfx1T>rj%M>EOI6>KH#Gj?y%J$BpLWRG=uZ0u48`ZE{*_j5OF(nkV^||*R-K+ zn4%?(sa`BfdeNp;$deeZx(TKL18^o3!SNK9Bshe4?C{YT zQnD2^3L9y;FhM&l(5CTF3n=?pz$Vz_;Wq1^Q-_7O zIz=X)V#afmaI@bMsAA+8;>U4bkpdh7gZ+6Jp}oPUjR9HetpVmwgy3W?a|{7q1+LFJm>HEX?k)81X%FjNBaK$5W`L z^7Y0X;76)ll?DuJdE?_k$ff z-NCiVnd&-QU5e;ZfE!!5gz;-{q26=_3U?B1yv!ND|H1M|v_qoSv|o$fuF{5q3yqDA z6U*>NgtCA~NzFE50SS*-9fmJo7kE%4_LY==iu@1=tyDrN+f0^YQk!@lK2rqqro^a1%Gc?~m*TV~_2UEd5 zbQ{|6(0S3LLBhYBsaf-60ie8b=BiOCp*HTd_L_N6C1*BH(0St)sCY6?3rGM(giIR0 zB=9LhbmT^|P_Y|Iw(ndAvqu#Jt#)8*QM#2~(x~}PxMyEh2K69+Y_v?YaS9QJYcWK5 z)~FufI(tjoWJfoYq`~0MS%~L7E~Ur`>Feh}AS&EU=WlP{ z0~HTj_XgU9S*rCOJdq||Wd#kQN$bq}kOZ(H*2M!VIn!~X$h?&i3rKhpX~Ir{{NCgO;mZpC_5rLxLouV#TrC(s;05?s9C@mRLjVB{CwG;c6M*P%rku z%h2#DLzxHQ*Gx)wDa<_GX(;L0Kxc0$TR#USyZ8l-B8?GA1eY>D1YzG3FwVATpyV?x zcVSiou!5W>>fMUk87B^EK-9mOG5t`{%$p9ilX=mLfK2+QW`-Y-c|Zyc#6V~?6(=6w zqtftO!dt+8tpy|iye3hNcnKUScg;+BGR!`lr&@byA(S{8LQ_+5uPbz#T~|BqyD(89 zN-!>t*~;YAbfn=rBaJJ2%QyMExo4H#2UZ!(<#J?VZc>Bruet~R$bY~($3ivF!XG*5 zdnwr=0lkz-g6DHEf<-LGAK5~C=Ryp(R`e+eap7}-s9mdz|vYk0zk=vi}DEcBwrAy#wscIrtS<7 z3*YXd5|$C-wkk1I3KVG6ugeF)a;k1GpeTPz={6;fp*=+#Qz)_>zb>PL{zT zgs>Y}Q~Qqe_mcZ|ZlnWvz(omPes_n@isWhOo@sZ2kutpk9GmWg!R&`VyIwcZ#Y!I4 z^gIm(cC-oR8R^=dvxZzm{rDT%GMf&y$zK~mvHZ4=XI+l`XJ{C;E4j1g{3w8$2LWZn3#Ll?wvoKMJ zn(WLZgBZGIxPFlzu6#fjFM^OMBPw~n;glsA4Q1~AAdEv_b*FOBeYU$5qhVa(WykL; z1>;uICY+6mE}$Gi$_zaL6VNz12nFeF8>{oEGDW%lT-UqWUimpeKVT2i3?kSoc^inX^lk 
zGNx_957G*RzF*F7jE|d)mG$nzUn$Xg8NH^|W8$wKs-ML}m zUx4@{Snwmdn5C@DIHD9g_YHH?GDBPFvtvPWd*+_cd+xbhS72=0zG%Hgx4<%zLUXHT zMV0qpyw04aMeiD;_Y;SKaxtc$@*p+Qx&|z;25{)K4;2~NDd7sr^8%GmLB)g26;-&L zI`91p?h-{PRL~9FZ!UwA4+)x07Ck4PL#c#LAH@Vy5E)4%lK+~q>`EhCwl~%j<8ls` zS{V~u4YgfOQVjajjov=x^=0W=K>A(+(dUYcClb%rv-UX_qTq)*9R4-H%+B@NdAcf- zu}y$kp4Q8RH>ouz=>0?uL1LVF4wW}E z*U$iuHGm^yoH|eu)6Rn^*@i&DISo#ElQ|GsZlbL(04LvTL zepR&kh&`xSp061T$Nww$B0}$Y5cz;-#@rCwm9Armp(sn)kjU+m#CdrV@QlCfXdAi< z(h#GVHAKOQ2_K{J#~|LSmh=pFA5|tpu_qwjjp=2=epCwM+QO@9PE$|eXZJTWgz5yV zsfVxq%OG;S_wla=B<`wCAH^$!r)W1YPp(SA-Lmc`B=7$N z+OFAFw(EC?;?^Ps#*xblm4Lta#~jeM?1P`8hX&3uh)7yx=4Ky~!ShDp!>pnQP-QZ` z{D=Z$ZwPvsaEsc#mI|-Nq7oRV&^!_k@k28UGf>ER!@c+Mqj(W1J-$ML@l=`)Y(pJo zbZnS0n>Ke(S2OvHE`e8F)~&i46c379)=V@oYv>rET;t9A36jjj;PwUW91#Iwl4_o+CbBmCiC^@vinN5D;+)<;bNx# z4qkhb)~ZSJ((0oaWi+rVE=6>#YxRF&EE_%y&32t^WxKKgli1~xH+kC-dJ8j}qOogR zesNIAI56Xvoh39V#pDk=XVcn55XSL;G$x|Zr0JNZDZY?sU`r|~~PWD?6+PEg}OI6gSq7X}dS8$BHlJ8c18l$pq> z44^OwP@hsap632T!quN|KjRPavw9@RyYYu;acE)*hGDDJ)Y8BKRLCfB1(JSZd`g9PqTV7SWps08x!wSoue^K7loBP3uV+Kx<=kJ;9>Rehd zV%nbcaU{We(to~XTUD9FIu3rEUjqPN&k4kMndnL#XST#A`)FUI=w#dSfm$nTA9bx^v1{K#`BaiRJ6x-hDAI(`^Y} zk_|*q;$%q&KsR}vC9Yn3X3oKXYq!R>Yo72SRo|>t^0ZT@flYt8X!zqcjcpt2un`*F zi4(;fc;bkEwRUh@_c~|+UI#wv`&nDCh93&CLZ(WO>DOJOFKBJyYo3silW z@Ebfz0S+U4nyCom0$2^@5vPy9@AmHJ#gXHR1NgVMXI3UQf#3>O@*fPrL`NSDd)WWL zYe+EUWQBN#VP@G&E}KIxLK4bECcURzq9x>%tH1)|zd#`}^DlJZg@q6qw}*CznHZ0{ z)OzaaYPEj!_j6s>xVpMty{A%lDeo;A7N4$>r$vj5@Gac+?T|FcAo0{rJ4&++m8jGA zV1V?ZhZ{Anjc&)ps&wuO{0ww6=xHd@! zu=K4+uA)i_=7KcVMrDji0^}`a{wPp%)KhCc-u&u0WYA&caGapU!xnU~o+JTw%O3=5 zNgTiujdCTn2wUTj8mn3=^i(f1v>YNpt&GvX7C|nObXE?i06LM$VZ5I_O=R@|jj<)M z6n|7&{(+4ed)CVct$W9#%mbx-R2zl`r|T%tj|{hoKS0{i=1OBp`hZdzS5~D2b75^% z{+PUy++}POi~dr`XQE^a-u#NWWYA&cxOKAqpI#ZhF0iy!NxwV#PK7NT6-aDVa!8F8 zEfsn?l=&DiMI+!Z&?{qH_B_FQB9lH=h}QlI@p}icV{Cnf$%yGIY{Q`VuSo!7Y*rU3 zOGgk#`g#eGSx{_eRfa+4boJLy*AQ7Q6UiOc35x1?0*%c3$k2}k8u2albqIf*e|jP03H& z=AIgL!KA=ap{GNUdEgQOp;{T=>Q2^wl9Ow-wC&DC)Yb>IKz2M)KJ5MrTQL#j6}VM! 
z?mOD?+0uHMh#HGyEbGJqog}N0NvH61?I)rX#-X4TUG0k;dA({q9ZW>DtLmbBVJ{It z>FlJBx>3eN+howsnjLFjF=S7Z$UfuH)`e{LLTh;Q3+h~NG=p9uEyk8~Vs&X7U1ICh zWTP+0N@7#<)AqTiM%}bPo>0@F_`Sd-0bEKaAZn|WPqM!)eiF)J62!+3_Of-%vKoX|hNcFx2oTQI5jUo5@nVtDg+ z!sTW&s0ZaYrmHl-GckKNVQ!L5fnIBg%^!u>9*5Le{Zes>bE{9{Po{1W@G`qHwuN1^ z{Wx<#1j(xq>kTshf^RpnXbPq1B)ob z;V$Yne&fL)qq95v2$d3OMH|~=@@qeHOlIRkF9W>FpSJCfg+c2P%!&?omeQao*XeMf z6;S%?*jjJoPLEx`jDqpAC(2Q-MaMxx%t5xacV5%&!_q4k89_y zBBwxeYOIaP!$cbs{qoYL>!!*vQd2VR@y|Tm^1oT?!s#3TpUi^W1^e%za-GUVb-wuy z(W3OX^tlDZ*Ze7&pA?Ec*P8cdX-aq`Xyd|)<)?Gy{-tK1R5 z%9t$YSRgToi0FVlOSolBd#LsP!@D!y@a`D4yb6H^F@Fkurj6_XFk4Tz_W~rhz{JD zf4AoIEMwYJfBMAWj**LbfbTS(CPq1DON#(j#&pXdi}*atn26{A8DtTUXBpG}h!QdB zNqymc^9AtEiI2CD;`j1K04rls@pqw#h-!qt3r+i^f3aNdK0yW<-Xj9|1|BWqvlK)` zb;V~XXwQrUdB{A<8v(40Ng*+xr63}z4Vm*<3feE>Tyo9qAa4Y)GA4y2JePuqs5T_w zxfHZ-f{6%pBC`N_NF0EbF=;cT4ZS67ytm@z@uz}3BOe% zBB~4VMUf_X;TgkV{@5+vV{{grP4XnWocRC% literal 0 HcmV?d00001 diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 000000000..f221901aa --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,21 @@ +# MIT License + +Copyright (c) 2023 Community Resource for Innovation in Polymer Technology (CRIPT) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/README.md b/README.md index b61cefae6..be1597df2 100644 --- a/README.md +++ b/README.md @@ -1,14 +1,33 @@ # CRIPT Python SDK +[![License](./CRIPT_full_logo_colored_transparent.png)](https://github.com/C-Accel-CRIPT/Python-SDK/blob/develop/LICENSE.md) - -[![License](https://img.shields.io/github/license/C-Accel-CRIPT/cript?style=flat-square)](https://github.com/C-Accel-CRIPT/cript/blob/master/LICENSE.txt) -[![Python](https://img.shields.io/badge/Language-Python%203.9+-blue?style=flat-square&logo=python)](https://www.python.org/) +[![License](https://img.shields.io/github/license/C-Accel-CRIPT/cript?style=flat-square)](https://github.com/C-Accel-CRIPT/Python-SDK/blob/develop/LICENSE.md) +[![Python](https://img.shields.io/badge/Language-Python%203.7+-blue?style=flat-square&logo=python)](https://www.python.org/) [![Code style is black](https://img.shields.io/badge/Code%20Style-black-000000.svg?style=flat-square&logo=python)](https://github.com/psf/black) [![Link to CRIPT website](https://img.shields.io/badge/platform-criptapp.org-blueviolet?style=flat-square)](https://criptapp.org/) -[![CRIPT Blog Link](https://img.shields.io/badge/Blog-blog.criptapp.org-blueviolet?style=flat-square)](https://blog.criptapp.org) +[![Using Pytest](https://img.shields.io/badge/Dependencies-pytest-green?style=flat-square&logo=Pytest)](https://docs.pytest.org/en/7.2.x/) +[![Using JSONSchema](https://img.shields.io/badge/Dependencies-jsonschema-blueviolet?style=flat-square&logo=json)](https://python-JSONSchema.readthedocs.io/en/stable/) +[![Using Requests Library](https://img.shields.io/badge/Dependencies-Requests-blueviolet?style=flat-square&logo=python)](https://requests.readthedocs.io/en/latest/) [![Material MkDocs](https://img.shields.io/badge/Docs-mkdocs--material-blueviolet?style=flat-square&logo=markdown)](https://squidfunk.github.io/mkdocs-material/) +[![trunk CI](https://github.com/C-Accel-CRIPT/Python-SDK/actions/workflows/trunk.yml/badge.svg)](https://github.com/C-Accel-CRIPT/Python-SDK/actions/workflows/trunk.yml) +[![Tests](https://github.com/C-Accel-CRIPT/Python-SDK/actions/workflows/tests.yml/badge.svg)](https://github.com/C-Accel-CRIPT/Python-SDK/actions/workflows/tests.yml) +[![CodeQL](https://github.com/C-Accel-CRIPT/Python-SDK/actions/workflows/codeql.yml/badge.svg)](https://github.com/C-Accel-CRIPT/Python-SDK/actions/workflows/codeql.yml) +[![mypy](https://github.com/C-Accel-CRIPT/Python-SDK/actions/workflows/mypy.yaml/badge.svg)](https://github.com/C-Accel-CRIPT/Python-SDK/actions/workflows/mypy_check.yaml) + + + + + +## Disclaimer + +This is the successor to the original [CRIPT Python SDK](https://github.com/C-Accel-CRIPT/cript). The new CRIPT Python SDK is still under development, and we will officially release it as soon as it is ready. For now, please use the [original CRIPT Python SDK](https://github.com/C-Accel-CRIPT/cript). + +--- + ## What is it? The CRIPT Python SDK allows programmatic access to the [CRIPT platform](https://criptapp.org). It can help automate uploading your data to CRIPT, and aims to allow for manipulation of your CRIPT data through the Python language. This is a perfect tool for users who have Python experience and have a large amount of data to upload to [CRIPT](https://criptapp.org).
@@ -17,7 +36,7 @@ The CRIPT Python SDK allows programmatic access to the [CRIPT platform](https:// ## Installation -CRIPT Python SDK requires Python 3.9+ +CRIPT Python SDK requires Python 3.7+ The latest release of the CRIPT Python SDK is available on [Python Package Index (PyPI)](https://pypi.org/project/cript/) @@ -29,18 +48,33 @@ pip install cript ## Documentation -To learn more about the CRIPT Python SDK please check the [CRIPT-SDK documentation](https://c-accel-cript.github.io/cript/) +To learn more about the CRIPT Python SDK, please check the [CRIPT-SDK documentation](https://c-accel-cript.github.io/Python-SDK/) --- ## Release Notes -For updates and release notes please visit the [CRIPT blog](https://blog.criptapp.org) +Please visit the [GitHub Releases page](https://github.com/C-Accel-CRIPT/Python-SDK/releases/latest) for detailed release notes. + +--- + +## We Invite Contribution + +To get started, feel free to take a look at our [Contribution Guidelines](CONTRIBUTING.md) for +a detailed guide on how to contribute to our repository and become a part of our community. + +Whether you want to report a bug, propose a new feature, or submit a pull request, your contribution is highly valued. + +For development documentation to better understand the Python SDK code, please visit the +[Python SDK Wiki](https://github.com/C-Accel-CRIPT/Python-SDK/wiki). +If you encounter any issues, please let us know via the +[issues section](https://github.com/C-Accel-CRIPT/Python-SDK/issues) or the +[discussions section](https://github.com/C-Accel-CRIPT/Python-SDK/discussions). + +To learn more about our community and all the open-source plugins it has made available +for the [CRIPT Python SDK](https://github.com/C-Accel-CRIPT/Python-SDK), please take a look at the +[plugins section](https://github.com/C-Accel-CRIPT/Python-SDK/discussions/categories/plugins). -### Software development +We appreciate your interest in contributing to our project! Together, let's make it even better! 🚀 -You are welcome to contribute code via PR to this repository. -For the developmet, we are using [trunk.io](https://trunk.io) to achieve a consistent coding style. -You can run `./trunk fmt` to auto-format your contributions and `./trunk check` to verify your contribution complies with our standard via trunk. -We will run the same test automatically before we are able to merge the code. -Please, let us know if there are any issues. +Happy coding!
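+ +## Quick Start Sketch + +To get a feel for the SDK before diving into the full documentation, here is a minimal, illustrative sketch of connecting to CRIPT and assembling a small project graph. The tokens and names below are placeholders, and the calls simply mirror the tutorial examples under `docs/examples/`; treat this as a sketch rather than a definitive recipe. + +```python +import cript + +# Placeholder credentials; prefer loading tokens from environment variables. +with cript.API(host="https://api.criptapp.org/", api_token="<api-token>", storage_token="<storage-token>") as api: +    project = cript.Project(name="My first project") +    collection = cript.Collection(name="Initial screening") +    project.collection += [collection] + +    project.validate() +    # api.save(project)  # uncomment to upload the project graph to CRIPT +```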
diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 000000000..ac047cd0d --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,15 @@ +# Security Policy + + + +## Reporting a Vulnerability + +If you find any security issues or vulnerabilities, please report them to cript_report@mit.edu diff --git a/docs/api/api.md b/docs/api/api.md new file mode 100644 index 000000000..496934ead --- /dev/null +++ b/docs/api/api.md @@ -0,0 +1 @@ +::: cript.api.api diff --git a/docs/api/controlled_vocabulary_categories.md b/docs/api/controlled_vocabulary_categories.md new file mode 100644 index 000000000..e851edaea --- /dev/null +++ b/docs/api/controlled_vocabulary_categories.md @@ -0,0 +1 @@ +::: cript.VocabCategories diff --git a/docs/api/paginator.md b/docs/api/paginator.md new file mode 100644 index 000000000..e24e116f6 --- /dev/null +++ b/docs/api/paginator.md @@ -0,0 +1 @@ +::: cript.api.paginator diff --git a/docs/api/search_modes.md b/docs/api/search_modes.md new file mode 100644 index 000000000..6169fd264 --- /dev/null +++ b/docs/api/search_modes.md @@ -0,0 +1 @@ +::: cript.SearchModes diff --git a/docs/examples/.gitignore b/docs/examples/.gitignore new file mode 100644 index 000000000..50621420c --- /dev/null +++ b/docs/examples/.gitignore @@ -0,0 +1,2 @@ +*ipynb +*.ipynb_checkpoints \ No newline at end of file diff --git a/docs/examples/simulation.md b/docs/examples/simulation.md new file mode 100644 index 000000000..9c385a106 --- /dev/null +++ b/docs/examples/simulation.md @@ -0,0 +1,392 @@ +--- +jupyter: + jupytext: + cell_metadata_filter: -all + formats: ipynb,md + text_representation: + extension: .md + format_name: markdown + format_version: "1.3" + jupytext_version: 1.13.6 + kernelspec: + display_name: Python 3 (ipykernel) + language: python + name: python3 +--- + +!!! abstract + This tutorial guides you through an example simulation workflow using the + [CRIPT Python SDK](https://pypi.org/project/cript/). + + +## Installation + +Before you start, be sure the [cript python package](https://pypi.org/project/cript/) is installed. + +```bash +pip install cript +``` + +## Connect to CRIPT + +To connect to CRIPT, you must enter a `host` and an `API Token`. For most users, `host` will be `https://criptapp.org`. + +!!! Warning "Keep API Token Secure" + + To ensure security, avoid storing sensitive information like tokens directly in your code. + Instead, use environment variables. + Storing tokens in code shared on platforms like GitHub can lead to security incidents. + Anyone who possesses your token can impersonate you on the [CRIPT](https://criptapp.org/) platform. + Consider [alternative methods for loading tokens with the CRIPT API Client](https://c-accel-cript.github.io/Python-SDK/api/api/#cript.api.api.API.__init__). + If your token is ever exposed, be sure to immediately generate a new token to revoke access for the old one, + and keep the new token safe. + +```python +import cript + +with cript.API(host="https://api.criptapp.org/", api_token="123456", storage_token="987654") as api: + pass +``` + +!!! note + + You may notice that we are not executing any code inside the context manager block. + If you were writing a Python script, rather than a Jupyter notebook, you would add all the following code inside that block. + Here in a Jupyter notebook, we need to connect manually. We just have to remember to disconnect at the end.
+ +```python +api = cript.API(host="https://api.criptapp.org/", api_token=None, storage_token="123456") +api = api.connect() +``` + +## Create a Project + +All data uploaded to CRIPT must be associated with a [`Project`](../../nodes/primary_nodes/project) node. +[`Project`](../../nodes/primary_nodes/project) can be thought of as an overarching research goal. +For example, finding a replacement for an existing material from a sustainable feedstock. + +```python +# create a new project in the CRIPT database +project = cript.Project(name="My simulation project.") +``` + +## Create a [Collection node](../../nodes/primary_nodes/collection) + +For this project, you can create multiple collections, which represent a set of experiments. +For example, you can create a collection for a specific manuscript, +or you can create a collection for initial screening of candidates and one for later refinements, etc. + +So, let's create a collection node and add it to the project. + +```python +collection = cript.Collection(name="Initial simulation screening") +# We add this collection to the project as a list. +project.collection += [collection] +``` + +!!! note "Viewing CRIPT JSON" + + Note that if you are interested in the inner workings of CRIPT, + you can obtain a JSON representation of your data at any time to see what is being sent to the API + through HTTP JSON requests. + +```python +print(project.json) +``` + +!!! info "Format JSON in terminal" + Format the JSON within the terminal for easier reading + ```python + print(project.get_json(indent=2).json) + ``` + +## Create an [Experiment node](../../nodes/primary_nodes/experiment) + +The [Collection node](../../nodes/primary_nodes/collection) holds a series of +[Experiment](../../nodes/primary_nodes/experiment) nodes. + +We can create an experiment node and add it to the collection of the project. + +```python +experiment = cript.Experiment(name="Simulation for the first candidate") +collection.experiment += [experiment] +``` + +## Create relevant [Software nodes](../../nodes/primary_nodes/software) + +[`Software`](../../nodes/primary_nodes/software) nodes refer to software that you use during your simulation experiment. +In general, [`Software`](../../nodes/primary_nodes/software) nodes can be shared between projects, and you are encouraged to do so: if the software you are using is already present in the CRIPT project, reuse it. + +If it is not, you can create the needed nodes as follows: + +```python +python = cript.Software(name="python", version="3.9") +rdkit = cript.Software(name="rdkit", version="2020.9") +stage = cript.Software(name="stage", source="https://doi.org/10.1021/jp505332p", version="N/A") +packmol = cript.Software(name="Packmol", source="http://m3g.iqm.unicamp.br/packmol", version="N/A") +openmm = cript.Software(name="openmm", version="7.5") +``` + +Generally, provide as much information about the software as possible; this helps make your results reproducible. +Even if a software package is not publicly available, like an in-house code, we encourage you to record it in CRIPT. +If a version number is not available, consider using the git commit hash instead, as sketched below.
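+ +For instance, an in-house simulation code with no released version could be recorded with its git commit hash standing in for the version; the name, hash, and source URL below are purely illustrative placeholders. + +```python +# Hypothetical in-house code identified by a short git commit hash instead of a release version. +inhouse_md = cript.Software( +    name="in-house-md-engine",  # illustrative name +    version="a1b2c3d",  # short git commit hash used in place of a version number +    source="https://github.com/example/in-house-md-engine",  # placeholder URL +) +```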
+ + +## Create [Software Configuration](../../nodes/subobjects/software_configuration/) + +Now that we have our [`Software`](../../nodes/primary_nodes/software) nodes, we can create +[`SoftwareConfiguration`](../../nodes/subobjects/software_configuration/) nodes. [`SoftwareConfiguration`](../../nodes/subobjects/software_configuration/) nodes are designed to let you specify details about which algorithms from the software package you are using, and to log parameters for these algorithms. + +The [`SoftwareConfiguration`](../../nodes/subobjects/software_configuration/) nodes are then used for constructing our [`Computation`](../../nodes/primary_nodes/computation/) node, which describes the actual computation you are performing. + +We can also attach [`Algorithm`](../../nodes/subobjects/algorithm) nodes to a [`SoftwareConfiguration`](../../nodes/subobjects/software_configuration) +node. The [`Algorithm`](../../nodes/subobjects/algorithm) nodes may contain nested [`Parameter`](../../nodes/subobjects/parameter) nodes, as shown in the example below. + + + +```python +# create some software configuration nodes +python_config = cript.SoftwareConfiguration(software=python) +rdkit_config = cript.SoftwareConfiguration(software=rdkit) +stage_config = cript.SoftwareConfiguration(software=stage) + +# create a software configuration node with a child Algorithm node +openmm_config = cript.SoftwareConfiguration( + software=openmm, + algorithm=[ + cript.Algorithm( + key="energy_minimization", + type="initialization", + ), + ], +) +packmol_config = cript.SoftwareConfiguration(software=packmol) +``` + +!!! note "Algorithm keys" + The allowed [`Algorithm`](../../nodes/subobjects/algorithm/) keys are listed under [algorithm keys](https://criptapp.org/keys/algorithm-key/) in the CRIPT controlled vocabulary. + +!!! note "Parameter keys" + The allowed [`Parameter`](../../nodes/subobjects/parameter/) keys are listed under [parameter keys](https://criptapp.org/keys/parameter-key/) in the CRIPT controlled vocabulary. + + +## Create [Computations](../../nodes/primary_nodes/computation) + +Now that we've created some [`SoftwareConfiguration`](../../nodes/subobjects/software_configuration) nodes, we can use them to build full [`Computation`](../../nodes/primary_nodes/computation) nodes. +In some cases, we may also want to add [`Condition`](../../nodes/subobjects/condition) nodes to our computation, to specify the conditions under which the computation was carried out. An example of this is shown below. + + +```python +# Create a ComputationNode +# This block of code represents the computation involved in generating forces. +# It also details the initial placement of molecules within a simulation box. +init = cript.Computation( + name="Initial snapshot and force-field generation", + type="initialization", + software_configuration=[ + python_config, + rdkit_config, + stage_config, + packmol_config, + openmm_config, + ], +) + +# Initiate the simulation equilibration using a separate node. +# The equilibration process is governed by specific conditions and a set equilibration time. +# Given this is an NPT (Number of particles, Pressure, Temperature) simulation, conditions such as the number of chains, temperature, and pressure are specified. +equilibration = cript.Computation( + name="Equilibrate data prior to measurement", + type="MD", + software_configuration=[python_config, openmm_config], + condition=[ + cript.Condition(key="time_duration", type="value", value=100.0, unit="ns"), + cript.Condition(key="temperature", type="value", value=450.0, unit="K"), + cript.Condition(key="pressure", type="value", value=1.0, unit="bar"), + cript.Condition(key="number", type="value", value=31), + ], + prerequisite_computation=init, +) + +# This section involves the actual data measurement.
+# Note that we use the previously computed data as a prerequisite. Additionally, we incorporate the input data at a later stage. +bulk = cript.Computation( + name="Bulk simulation for measurement", + type="MD", + software_configuration=[python_config, openmm_config], + condition=[ + cript.Condition(key="time_duration", type="value", value=50.0, unit="ns"), + cript.Condition(key="temperature", type="value", value=450.0, unit="K"), + cript.Condition(key="pressure", type="value", value=1.0, unit="bar"), + cript.Condition(key="number", type="value", value=31), + ], + prerequisite_computation=equilibration, +) + +# The following step involves analyzing the data from the measurement run to ascertain a specific property. +ana = cript.Computation( + name="Density analysis", + type="analysis", + software_configuration=[python_config], + prerequisite_computation=bulk, +) + +# Add all these computations to the experiment. +experiment.computation += [init, equilibration, bulk, ana] +``` + + +!!! note "Computation types" + The allowed [`Computation`](../../nodes/primary_nodes/computation) types are listed under [computation types](https://criptapp.org/keys/computation-type/) in the CRIPT controlled vocabulary. + +!!! note "Condition keys" + The allowed [`Condition`](../../nodes/subobjects/condition) keys are listed under [condition keys](https://criptapp.org/keys/condition-key/) in the CRIPT controlled vocabulary. + + +## Create and Upload [File nodes](../../nodes/supporting_nodes/file) + +Next, we'd like to upload the files associated with our simulation. First, we'll instantiate our File nodes under a specific project. + +```python +packing_file = cript.File(name="Initial simulation box snapshot with roughly packed molecules", type="computation_snapshot", source="path/to/local/file") +forcefield_file = cript.File(name="Forcefield definition file", type="data", source="path/to/local/file") +snap_file = cript.File(name="Bulk measurement initial system snapshot", type="computation_snapshot", source="path/to/local/file") +final_file = cript.File(name="Final snapshot of the system at the end of the simulations", type="computation_snapshot", source="path/to/local/file") +``` + +!!! note + The [source field](../../nodes/supporting_nodes/file/#cript.nodes.supporting_nodes.file.File.source) should point to any file on your local filesystem + or a web URL where the file can be found. + + > For example, + > [CRIPT protein JSON file on CRIPTScripts](https://criptscripts.org/cript_graph_json/JSON/cao_protein.json) + +Note that we haven't uploaded the files to CRIPT yet; this is performed automatically when the project is uploaded via `api.save(project)`. + + +## Create Data + +Next, we'll create a [`Data`](../../nodes/primary_nodes/data) node, which helps organize our [`File`](../../nodes/supporting_nodes/file) nodes and links back to our [`Computation`](../../nodes/primary_nodes/computation) objects.
+ +```python +packing_data = cript.Data( + name="Loosely packed chains", + type="computation_config", + file=[packing_file], + computation=[init], + notes="PDB file without topology describing an initial system.", +) + +forcefield_data = cript.Data( + name="OpenMM forcefield", + type="computation_forcefield", + file=[forcefield_file], + computation=[init], + notes="Full forcefield definition and topology.", +) + +equilibration_snap = cript.Data( + name="Equilibrated simulation snapshot", + type="computation_config", + file=[snap_file], + computation=[equilibration], +) + +final_data = cript.Data( + name="Logged volume during simulation", + type="computation_trajectory", + file=[final_file], + computation=[bulk], +) +``` + +!!! note "Data types" + The allowed [`Data`](../../nodes/primary_nodes/data) types are listed under the [data types](https://criptapp.org/keys/data-type/) in the CRIPT controlled vocabulary. + +Next, we'll link these [`Data`](../../nodes/primary_nodes/data) nodes to the appropriate [`Computation`](../../nodes/primary_nodes/computation) nodes. + +```python + +# Observe how this step also forms a continuous graph, enabling data to flow from one computation to the next. +# The sequence initiates with the computation process and culminates with the determination of the material property. +init.output_data = [packing_data, forcefield_data] +equilibration.input_data = [packing_data, forcefield_data] +equilibration.output_data = [equilibration_snap] +ana.input_data = [final_data] +bulk.output_data = [final_data] +``` + +## Create a virtual Material + +First, we'll create a virtual material and add some +[`Identifiers`](../../nodes/primary_nodes/material/#cript.nodes.primary_nodes.material.Material.identifiers) +to the material to make it easier to identify and search. + +```python +# create identifier dictionaries and put it in `identifiers` variable +identifiers = [{"names": ["poly(styrene)", "poly(vinylbenzene)"]}] +identifiers += [{"bigsmiles": "[H]{[>][<]C(C[>])c1ccccc1[<]}C(C)CC"}] +identifiers += [{"chem_repeat": ["C8H8"]}] + +# create a material node object with identifiers +polystyrene = cript.Material(name="virtual polystyrene", identifiers=identifiers) +``` + +!!! note "Identifier keys" + The allowed [`Identifiers`](../../nodes/primary_nodes/material/#cript.nodes.primary_nodes.material.Material.identifiers) keys are listed in the [material identifier keys](https://criptapp.org/keys/material-identifier-key/) in the CRIPT controlled vocabulary. + +## Add [`Property`](../../nodes/subobjects/property) sub-objects +Let's also add some [`Property`](../../nodes/subobjects/property) nodes to the [`Material`](../../nodes/primary_nodes/material), which represent its physical or virtual (in the case of a simulated material) properties. + +```python +phase = cript.Property(key="phase", value="solid", type="none", unit=None) +color = cript.Property(key="color", value="white", type="none", unit=None) + +polystyrene.property += [phase] +polystyrene.property += [color] +``` + +!!! note "Material property keys" + The allowed material [`Property`](../../nodes/subobjects/property) keys are listed in the [material property keys](https://criptapp.org/keys/material-property-key/) in the CRIPT controlled vocabulary. + +## Create [`ComputationalForcefield`](../../nodes/subobjects/computational_forcefield) +Finally, we'll create a [`ComputationalForcefield`](../../nodes/subobjects/computational_forcefield) node and link it to the Material. 
+ + +```python +forcefield = cript.ComputationalForcefield( + key="opls_aa", + building_block="atom", + source="Custom determination via STAGE", + data=[forcefield_data], +) + +polystyrene.computational_forcefield = forcefield +``` + +!!! note "Computational forcefield keys" + The allowed [`ComputationalForcefield`](../../nodes/subobjects/computational_forcefield/) keys are listed under the [computational forcefield keys](https://criptapp.org/keys/computational-forcefield-key/) in the CRIPT controlled vocabulary. + +Now we can save the project to CRIPT (and upload the files) or inspect the JSON output. +## Validate CRIPT Project Node +```python +# Before we can save it, we should add all the orphaned nodes to the experiments. +# It is important to do this for every experiment separately, but here we only have one. +cript.add_orphaned_nodes_to_project(project, active_experiment=experiment) +project.validate() + +# api.save(project) +print(project.get_json(indent=2).json) + +# Let's not forget to close the API connection after everything is done. +api.disconnect() +``` + +## Conclusion + +You made it! We hope this tutorial has been helpful. + +Please let us know how you think it could be improved. +Feel free to reach out to us on our [CRIPT Python SDK GitHub](https://github.com/C-Accel-CRIPT/Python-SDK). +We'd love your input and contributions! \ No newline at end of file diff --git a/docs/examples/synthesis.md b/docs/examples/synthesis.md new file mode 100644 index 000000000..62bded0ef --- /dev/null +++ b/docs/examples/synthesis.md @@ -0,0 +1,296 @@ +--- +jupyter: + jupytext: + cell_metadata_filter: -all + formats: ipynb,md + text_representation: + extension: .md + format_name: markdown + format_version: "1.3" + jupytext_version: 1.13.6 + kernelspec: + display_name: Python 3 + language: python + name: python3 +--- + +!!! abstract + This tutorial guides you through an example material synthesis workflow using the + [CRIPT Python SDK](https://pypi.org/project/cript/). + + +## Installation + +Before you start, be sure the [cript python package](https://pypi.org/project/cript/) is installed. + +```bash +pip install cript +``` + +## Connect to CRIPT + +To connect to CRIPT, you must enter a `host` and an `API Token`. For most users, `host` will be `https://criptapp.org`. + +!!! Warning "Keep API Token Secure" + + To ensure security, avoid storing sensitive information like tokens directly in your code. + Instead, use environment variables. + Storing tokens in code shared on platforms like GitHub can lead to security incidents. + Anyone who possesses your token can impersonate you on the [CRIPT](https://criptapp.org/) platform. + Consider [alternative methods for loading tokens with the CRIPT API Client](https://c-accel-cript.github.io/Python-SDK/api/api/#cript.api.api.API.__init__). + If your token is ever exposed, be sure to immediately generate a new token to revoke access for the old one, + and keep the new token safe. + +```python +import cript + +with cript.API(host="https://api.criptapp.org/", api_token="123456", storage_token="987654") as api: + pass +``` + +!!! note + + You may notice that we are not executing any code inside the context manager block. + If you were writing a Python script, rather than a Jupyter notebook, you would add all the following code inside that block. + Here in a Jupyter notebook, we need to connect manually. We just have to remember to disconnect at the end.
+ +```python +api = cript.API(host="https://api.criptapp.org/", api_token=None, storage_token="123456") +api = api.connect() +``` + +## Create a Project + +All data uploaded to CRIPT must be associated with a [project](../../nodes/primary_nodes/project) node. +[Project](../../nodes/primary_nodes/project) can be thought of as an overarching research goal. +For example, finding a replacement for an existing material from a sustainable feedstock. + +```python +# create a new project in the CRIPT database +project = cript.Project(name="My first project.") +``` + +## Create a Collection node + +For this project, you can create multiple collections, which represent a set of experiments. +For example, you can create a collection for a specific manuscript, +or you can create a collection for initial screening of candidates and one for later refinements, etc. + +So, let's create a collection node and add it to the project. + +```python +collection = cript.Collection(name="Initial screening") +# We add this collection to the project as a list. +project.collection += [collection] +``` + +!!! note "Viewing CRIPT JSON" + + Note that if you are interested in the inner workings of CRIPT, + you can obtain a JSON representation of your data graph at any time to see what is being sent to the API. + +```python +print(project.json) +print("\nOr more pretty\n") +print(project.get_json(indent=2).json) +``` + +## Create an Experiment node + +The [collection node](../../nodes/primary_nodes/collection) holds a series of +[Experiment](../../nodes/primary_nodes/experiment) nodes. + +We can create an experiment node and add it to the collection of the project. + +```python +experiment = cript.Experiment(name="Anionic Polymerization of Styrene with SecBuLi") +collection.experiment += [experiment] +``` + +## Create an Inventory + +An [Inventory](../../nodes/primary_nodes/inventory) contains materials that are well known and usually not of a polymeric nature. +They are, for example, the chemicals you buy commercially and use as inputs to your synthesis. + +We create this inventory by adding the [Material](../../nodes/primary_nodes/material) nodes we need one by one. + +```python +# create a list of identifiers as dictionaries to +# identify your material to the community and your team +my_solution_material_identifiers = [ + {"chemical_id": "598-30-1"} +] + +solution = cript.Material( + name="SecBuLi solution 1.4M cHex", + identifiers=my_solution_material_identifiers +) +``` + +These materials are simple; notice how we use a chemical ID here as an identifier for the material. +Similarly, we can create more initial materials. + +```python +toluene = cript.Material(name="toluene", identifiers=[{"smiles": "Cc1ccccc1"}, {"pubchem_id": 1140}]) +styrene = cript.Material(name="styrene", identifiers=[{"smiles": "c1ccccc1C=C"}, {"inchi": "InChI=1S/C8H8/c1-2-8-6-4-3-5-7-8/h2-7H,1H2"}]) +butanol = cript.Material(name="1-butanol", identifiers=[{"smiles": "OCCCC"}, {"inchi_key": "InChIKey=LRHPLDYGYMQRHN-UHFFFAOYSA-N"}]) +methanol = cript.Material(name="methanol", identifiers=[{"smiles": "CO"}, {"names": ["methanol", "methyl alcohol"]}]) +``` + +Now that we defined those materials, we can combine them into an inventory +for easy access and sharing between experiments/projects.
+ +```python +inventory = cript.Inventory( + name="Common chemicals for poly-styrene synthesis", + material=[solution, toluene, styrene, butanol, methanol], +) +collection.inventory += [inventory] +``` + +## Create a Process node + +A [Process](../../nodes/primary_nodes/process) is a step in an experiment. +You decide how many [Process](../../nodes/primary_nodes/process) nodes are required for your experiment, +so you can describe your experiment at as fine-grained a level as desired. +Here we use just one step to describe the entire synthesis. + +```python +process = cript.Process( + name="Anionic Synthesis of Poly-Styrene", + type="multistep", + description="In an argon filled glove box, a round bottom flask was filled with 216 ml of dried toluene. The " + "solution of secBuLi (3 ml, 3.9 mmol) was added next, followed by styrene (22.3 g, 176 mmol) to " + "initiate the polymerization. The reaction mixture immediately turned orange. After 30 min, " + "the reaction was quenched with the addition of 3 ml of methanol. The polymer was isolated by " + "precipitation in methanol 3 times and dried under vacuum.", +) +experiment.process += [process] +``` + +## Add Ingredients to a Process + +From a chemistry standpoint, most experimental processes, regardless of whether they are carried out in the lab +or simulated using computer code, consist of input ingredients that are transformed in some way. +Let's add ingredients to the [Process](../../nodes/primary_nodes/process) that we just created. +For this, we use the materials from the inventory. +Next, define [Quantities](../../nodes/subobjects/quantity) nodes indicating the amount of each +[Ingredient](../../nodes/subobjects/ingredient) that we will use in the [Process](../../nodes/primary_nodes/process). + +```python +initiator_qty = cript.Quantity(key="volume", value=1.7e-8, unit="m**3") +solvent_qty = cript.Quantity(key="volume", value=1e-4, unit="m**3") +monomer_qty = cript.Quantity(key="mass", value=0.455e-3, unit="kg") +quench_qty = cript.Quantity(key="volume", value=5e-3, unit="m**3") +workup_qty = cript.Quantity(key="volume", value=0.1, unit="m**3") +``` + +Now we can create an [Ingredient](../../nodes/subobjects/ingredient) +node for each ingredient using the [Material](../../nodes/primary_nodes/material) +and [quantities](../../nodes/subobjects/quantity) attributes. + +```python +initiator = cript.Ingredient( + keyword=["initiator"], material=solution, quantity=[initiator_qty] +) + +solvent = cript.Ingredient( + keyword=["solvent"], material=toluene, quantity=[solvent_qty] +) + +monomer = cript.Ingredient( + keyword=["monomer"], material=styrene, quantity=[monomer_qty] +) + +quench = cript.Ingredient( + keyword=["quench"], material=butanol, quantity=[quench_qty] +) + +workup = cript.Ingredient( + keyword=["workup"], material=methanol, quantity=[workup_qty] +) + +``` + +Finally, we can add the `Ingredient` nodes to the `Process` node. + +```python +process.ingredient += [initiator, solvent, monomer, quench, workup] +``` + +## Add Conditions to the Process + +It's possible that our `Process` was carried out under specific physical conditions. We can codify this by adding +[Condition](../../nodes/subobjects/condition) nodes to the process.
+ +```python +temp = cript.Condition(key="temperature", type="value", value=25, unit="celsius") +time = cript.Condition(key="time_duration", type="value", value=60, unit="min") +process.condition = [temp, time] +``` + +## Add a Property to a Process + +We may also want to associate our process with certain properties. We can do this by adding +[Property](../../nodes/subobjects/property) nodes to the process. + +```python +yield_mass = cript.Property(key="yield_mass", type="number", value=47e-5, unit="kilogram", method="scale") +process.property += [yield_mass] +``` + +## Create a Material node (process product) + +Along with input [Ingredients](../../nodes/subobjects/ingredient), our [Process](../../nodes/primary_nodes/process) +may also produce product materials. + +First, let's create the [Material](../../nodes/primary_nodes/material) +that will serve as our product. We give the material a `name` attribute and add it to our +[Project](../../nodes/primary_nodes/project). + +```python +polystyrene = cript.Material(name="polystyrene", identifiers=[]) +project.material += [polystyrene] +``` + +Let's add some `Identifiers` to the material to make it easier to identify and search. + +```python +# create a name identifier +polystyrene.identifiers += [{"names": ["poly(styrene)", "poly(vinylbenzene)"]}] + +# create a BigSMILES identifier +polystyrene.identifiers += [{"bigsmiles": "[H]{[>][<]C(C[>])c1ccccc1[<]}C(C)CC"}] +# create a chemical repeat unit identifier +polystyrene.identifiers += [{"chem_repeat": ["C8H8"]}] +``` + +Next, we'll add some [Property](../../nodes/subobjects/property) nodes to the +[Material](../../nodes/primary_nodes/material), which represent its physical or virtual +(in the case of a simulated material) properties. + +```python +# create a phase property +phase = cript.Property(key="phase", value="solid", type="none", unit=None) +# create a color property +color = cript.Property(key="color", value="white", type="none", unit=None) + +# add the properties to the material +polystyrene.property += [phase, color] +``` + +**Congratulations!** You've just created a process that represents the polymerization reaction of polystyrene, starting with a set of input ingredients in various quantities, and ending with a new polymer with specific identifiers and physical properties. + +Now we can save the project to CRIPT via the api object. + +```python +project.validate() +print(project.get_json(indent=2, condense_to_uuid={}).json) +# api.save(project) +``` + +```python +# Don't forget to disconnect once everything is done +api.disconnect() +``` diff --git a/docs/exceptions/api_exceptions.md b/docs/exceptions/api_exceptions.md new file mode 100644 index 000000000..ece28dddc --- /dev/null +++ b/docs/exceptions/api_exceptions.md @@ -0,0 +1,3 @@ +## API Client Exceptions + +::: cript.api.exceptions diff --git a/docs/exceptions/node_exceptions.md b/docs/exceptions/node_exceptions.md new file mode 100644 index 000000000..3f0e185e8 --- /dev/null +++ b/docs/exceptions/node_exceptions.md @@ -0,0 +1,3 @@ +# Node Exceptions + +::: cript.nodes.exceptions diff --git a/docs/extra.css b/docs/extra.css new file mode 100644 index 000000000..93eb0db23 --- /dev/null +++ b/docs/extra.css @@ -0,0 +1,3 @@ +.screenshot-border { + border: black solid 0.1rem; +} diff --git a/docs/faq.md b/docs/faq.md new file mode 100644 index 000000000..638f0ba5e --- /dev/null +++ b/docs/faq.md @@ -0,0 +1,87 @@ +# Frequently Asked Questions + 
+
+**Q:** Where can I find more information about the [CRIPT](https://criptapp.org) data model?
+
+**A:** _Please feel free to review the
+[CRIPT data model document](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf)
+and the [CRIPT research paper](https://pubs.acs.org/doi/10.1021/acscentsci.3c00011)._
+
+---
+
+**Q:** What does this error mean?
+
+**A:** _Please visit the Exceptions documentation._
+
+---
+
+**Q:** Where do I report an issue that I encountered?
+
+**A:** _Please feel free to report issues to our [GitHub repository](https://github.com/C-Accel-CRIPT/Python-SDK)._
+_We are always looking for ways to improve and create software that is a joy to use!_
+
+---
+
+**Q:** Where can I find more CRIPT examples?
+
+**A:** _Please visit [CRIPT Scripts](https://criptscripts.org), which hosts many CRIPT examples, including CRIPT graphs drawn from research papers, Python scripts, TypeScript scripts, and more!_
+
+---
+
+**Q:** Where can I find more example code?
+
+**A:** _We have written a lot of tests for our software, and those tests can serve as example code for working with the Python SDK. The tests are located within the [GitHub repository/tests](https://github.com/C-Accel-CRIPT/Python-SDK/tree/main/tests) directory, where they are broken down into different kinds of tests._
+
+---
+
+**Q:** How can I contribute to this project?
+
+**A:** _We would love to have you contribute.
+Please read the [GitHub repository wiki](https://github.com/C-Accel-CRIPT/Python-SDK/wiki)
+to learn more and get started. Feel free to fix any bugs you find, work on any issues within the
+[GitHub repository](https://github.com/C-Accel-CRIPT/Python-SDK/issues), or add any features you want._
+
+---
+
+**Q:** This repository is awesome, how can I build a plugin to add to it?
+
+**A:** _We have built this code with plugins in mind! Please visit the
+[CRIPT Python SDK GitHub repository Wiki](https://github.com/C-Accel-CRIPT/Python-SDK/wiki)
+tab for developer documentation._
+
+---
+
+**Q:** I have a question that is not covered anywhere; where can I ask it?
+
+**A:** _Please visit the [CRIPT Python SDK repository](https://github.com/C-Accel-CRIPT/Python-SDK)
+and ask your question within the
+[discussions tab Q/A section](https://github.com/C-Accel-CRIPT/Python-SDK/discussions/categories/q-a)._
+
+---
+
+**Q:** What is the best way to contact the CRIPT Python SDK team for questions or support?
+
+**A:** _We would love to hear from you! Please visit our [CRIPT Python SDK Repository GitHub Discussions](https://github.com/C-Accel-CRIPT/Python-SDK/discussions) to easily send us questions.
+Our [repository's issue page](https://github.com/C-Accel-CRIPT/Python-SDK/issues) is another good way to let us know about any issues or suggestions you might have.
+A GitHub account is required._
+
+---
+
+**Q:** How can I report security issues?
+
+**A:** _Please visit the [CRIPT Python SDK GitHub repository security tab](https://github.com/C-Accel-CRIPT/Python-SDK/security) to report any security issues._
+
+---
+
+**Q:** Besides the user documentation, is there any developer documentation that I can read through to get a better grasp of how
+the code is written?
+
+**A:** _You bet! There is documentation for developers within the
+[CRIPT Python SDK Wiki](https://github.com/C-Accel-CRIPT/Python-SDK/wiki).
+There you will find documentation on everything from how our code is structure, +how we aim to write our documentation, CI/CD, and more._ + +_We try to also have type hinting, comments, and docstrings for all the code that we work on so it is clear and easy for anyone reading it to easily understand._ + +_if all else fails, contact us on our [GitHub Repository](https://github.com/C-Accel-CRIPT/Python-SDK)._ diff --git a/docs/images/CRIPT_full_logo_colored_transparent.png b/docs/images/CRIPT_full_logo_colored_transparent.png new file mode 100644 index 0000000000000000000000000000000000000000..942727248ce04b4de33bf3ba68bc405c6fed2c9c GIT binary patch literal 30023 zcmb@tg(~IuZZ?$dwdjwE=(>3IKRK#CRa3 zTFEyN{O^{NqTyQr;A6e{hvShapgfH+)_&ESQ%a3lEl9^T+1(&R7M;3wYfBii^uqQy_V z$zP%w{QDr)>@C{xQMo-xw#8qv$ycn=PogPMwmC??1^ffIPpEpDpw1#RnhiW$mC^ml7Z4Ojwj56p1Z^3~mqn03zhEUzc1gr0XDh&bJwSijo zVTMf~^&3N8Gz4hY25QuQP_GS8tGi*PROhEugEnstHLCmgsv+cMU8rFL*fZFmF3F)2 zg=h>%*8h)~;@BFG_zrUZAHz@Y+vDFiCcXKd?9=ked^^)r}^E`QvzDSZ}2ZW z{D!zPzcV-XXJTNrzrTN|YpJ(suBl|SI%lXXwZAZ~2ki5|{>Oyh?}3K%dZ(?f1K{Hm z5Rs6O-=U#_(lIbGKVW5JW9Q`Ng$ao~dHPgBLQ+OnUQtO&NmX54T|-MpM^8^*-{93N zBV$uDGX%oI($d=6#>Uq6jlF}Dv&&mIcXtmD5AXkdeE#?G4FEsEA>lDGadF8h`IS|* zU0uBcV>26P-t$`k;4-KrE2HZ%y)~!iz@SG>us9 zx482z;PwGCea{2<9-}z;&GDTBm(&nVeT$l-yXdrb-e;jUID;X*Wmgw4wR8j(Z2NP1 z$_)4iwNRKft$OR=0*XrTkp1K199dZ&zgqxvx*>qIlW64Fm+^C91f+#yVW@9uNvGY8 z{J21mJUm2;!tL|-)_)^a01z94G7oBYV2;fpRO{nknk4@QVd1D;_w1=Uc$$2%|S4WKi0FOZ}ktkH%%{(~QzyXjCpq+&` zP1~sea2K3lMLSi&1Bgx=6Nc1}3V$O5nD3;+jH*ZTg$VuK4D16dRqUS^)8YWR*@7WA z*O$Ky0l-iZvv-y!+-DCA;r2p8RP1w({4VJL>AO29^NLYnlizqiPlqi%S>kldcP#)= zs>}Cp*Pcn*Fl{9TmuV!Yb)rSg@9s@bQJ^K$Te_D@k78RpzUWqt> zu9}sqj-<0$i4Y<14kNU>=b^K_Qv&k+G?;9;wXG1=IGUM30BG{}Cd=Dhdbpa})sq7$ zkDwzVt4-h2Hy2t-fUK9ML@VwUR1bGdZUI@ZO%`Jou*J^}myHTwrJJfkS6zs&x{rN99M#hPr#3i{|z>zFeUUv|TpL35__+ zOt`^KWM$vUJm@=f5N}%L#{sJUEC;#85g!d|6C!6+ zG1|`RUJEAi$w3D<&}(l@D|eH#V}o_mMs2I;sfN9#Nu(K#WlLs=yGtJ;Dn(1dzRc!qsM*NyIsno?eOfM{*n%E4y$ zSygyq1l6m-g12VkewTazb19#@umc;kuPf~Smg+`y$HXqs!{;jOzzXGGS^kz^HA*vV zdfG+=kZ-(BKbo%#24{ODT$EQsm3abWk=s-`U)@kFi^u^+dD>0XqkF&OD$UX+Mjj({=3S-duXah zh$LzoHS3~MN)>zHTS=d*R*VR=F;{W(IB}W*g6AI*9_4*c=@!P2H>#j&^!U8Uj7)m7 z!9bSIPz=#7T4cjp-Ao2po79@*+MwgyFagf$uYP96_u^v*RH!gObn)ZCPbdOxTBPq} z`XEc!W+oV|_s}{fI>Z=GHQS=?o2B9FeO;s~I8q+e!K~q{EH-F@8oVpM{LP@YU`!QeJG;XLET-~C23LUm# zMaT)5$=Wcus<>KhB|e}6fzeDaJCNEFbj2iGQ>(faZC@4x09{={PdsW)J9q!|JmN0Z zup^l^AkBJ09ahFUtDOtGFN}#j9tWR@T6D%5d9E%AM6}0-JIu~h%AAZWq_P|vX@n` zY`q2+M$Me}AwM++@c`>`A@x{rbb^@ow;O1bMqklAQo`ukJ4%7}21Hn(=NHB#ucYQ* zn%*GgP`30-Q~{p4nh&;egagy60 zEEXZg6*WkdArZmNonIw%qLmHir4s6%%U85f4lyr`uAIYtPkhhTxL9COKe|#x)1oH? zw){XlZIbZ^W30R7PUi0VE!b>O2`mgC-MMNCsGX@Y+0G(><9SX6GnQfsjx z@Wa*Y!M*KHENpi-U~Md4m=J{noT@aaVIi@wXW#RM0bucVFQomo4m|)kg(GMGScZqS z-H^TE!>aXbNgHnA#55UWQkF)1^|dTljcy?~41*pv4tvW|xD1H~WH`TNk9~HLw3XD( zo;nQx*5iLK$~iihabqh}>G>X2tbb_8@Fw0cVS(stnI@cLl33h~iM0NrF?_ znkaz;mW2KLvWlH;xBz2i%P7f@D5uyJ-Ch96rSIpKWo2gxz;TsW1JI`*AKbru+DH~ zF_>&NTVifSx0+wG6ZpBD5(PdhEIs+jPDUSxDo%F7{iiIOIz2BHCzw-G9))%%0HmGr z9m7B=1Dy5jY>PdSeJ1_w@f zt?%iUp$dF_(DkjwiLzI252mBWG9*{G7r0Y!NekmYpQU>7W(8Q|4vU5$(~5&ksT1!F zkU1(ayO{GoF@o+J`Kn=TypMmoF$H0p{7C$I3pXIc-ed{!pL&7S=IgxiOn^0ZZ zrbFY7|C{Eat91qsc!*SusvQRa`lSq9;q4X{vbcc|zHqWNhKc{@3B$mEz6Zpu`uidp-26b)PWCMK0yX zDt0T9t07`xE9}pXSA!mq5Va_|%@mghUIDk}A! 
z%6a?vkO^P_85Ulm-_&Ygl316Q?_07Xn8TfW(rgU+iH znc)eZv~yyb7{VQ{uuNwEo*2{}UjeuJBK!EG$E53$9rC#!=%1Jxeq;#5uwgZv)>zxj zwp?^(Dp+$?p))>@>SS|2@VnSNW5L;f0QIwbrovfgW;8uI-qG0zf1CN1De{iZVpN(b zv%d$(*iC>BloGpSJQY7+@B5<{6aAh4Hc(dez)U7*Jg`ziY(9a0ZeM<(2U^8>%K6hv z0;i8QwvbKsxA*67!-Db^M3N=*3_UUCR%+_^>>Lhhd=Pb9xr_<;vZ2U+VJv}tEd~GZCfFjIm zQ4H2#kPm)AaV^d&(7C9IP5{1sfb>sD1f=hSut>GNgwbR90SF0;nGylpH0EZIZ(xzu zp1W;^Ct)$SDei(MX0`xFOu=i82e4F^EJLC0OfW-zVxllY>bubbv9K>QFXXQ5Y&RoC z{>k$So&@{<$Lcru8g|Bqh!UQVN;FDKE1%BLtLa=l=|SDrzUQJsvP&hJ21JA|pHov!|ETi) zwW>Pm>toOFZV&+qDAnix4O=o33`SQ}Hu z%JfV3XZWX322cbVH=V55UDS^z!&w3EjR}jWDr`4CR?(@03~Z!+k15Q}frln7)otN0 zJM`-S=FHH}8!smpCS{yodVfI{nFj**8eI8aXWf+&u6AXj_VN5~+4u};zzuwcMN_Py zw7TqBleVUo%8Vd$o9Bz@6DkaJ9gG0_Ty+~@OJK zwe%rz;=beQW3!8M$#zjovX9GmRnQ+!!6h!Im~*R7hu3k^Fiyd4{Z#fNL=6Ebg74n< zF3eU!eEg`F{9#EF%iaDQbk~@a3Yt7$vO0exDtI=UUf3(&f?W8!MO1#%3$H7dB*<-7 zo?P6wM#|{sHSudT$)2myr+Z>#O>%JgIP6B_%U|GD-vjF6-SxKGLhj#jO(*jC$YiAx ziJjcbMr>JpEPcN@j8ywMT>kBDlwI~ibO8vM+wsD!>bhu?=3jXU)nY24bSaqM4bfL8 zMoBMPnzuj48{iOsMl>bs<^ zTxmnb7J@{rA;p_Im-!C&qZat@+7Jt!_H;$ilKNTiqX}fAkSNS#lF@NntVz?*kDo~~ zga*gYH@oY(!yI{-ec2E~>xvP7=1=U}L=kOuk7<))EL&v3S+;gdb{!5s5rD`Hq2Qpo zsuT}N!wCDlj%){i^0ZX?PlDN%=5)QEgeX(STcuuQyAJy5LilvvOM;uJV%`=w%+MU? zV46zTh(5-;{JGi#!7xG>fwxTuRa{AT7Yw9l!9+@mdy+)H2EL<__#^sGwTHixCUoxt zO5C^?kHP(7h0?1=L*Tft>+rhCx72b)g2jaVMJya=4}5C7w%WbY!Zv&s1u|tg<4vOV zg1-I6Hs>nc5jc3T0?qE>Vw;7|en~Q;qs8R4;l|uo+(<48Vg7Ewf?>z>VD~rB?{*s( zYC~RK%zLhD)L(gf>%8D&bevMuN;@Gc=i|fu5UMa2@kt zvn@=b(wd4pR1b*xN~QB@uDJW1k@%k9O7II%FFYI^s@uL@r8J}%$fUb?1KQYT4F zBR#^On#8)1a3mmAry|E+bMko!{c#x-e(4#~Wjy_c%pROzHUf2?7$*TfbRA6xkTkJ@ zIim7GBIpU%+a`;>ve%Xc|7kN%hw=H57D+Zgww<8O#6&F-TmT~?c?i{MGQ>2)JSOn0 zobPZ~Jt2OXJqCHn29nbaqWc(2uJ}*i51}S-guwSd?-7(--B!DTvX6JJAW<~xclu?u zL36)OlElAfMNtlET?};S>?9B&r?kq~pQ?tnhG@ev@38er_@yzZqPE_bB`@adwuZTH zh&})sq_vwux#Q!Sx3lzHw1ti-8RQV4@bC3SpvAFNAXkOqWa9rw1vLmccmd(Af4S$* z3Enq3cs@lH>YgIl<|26e&6~#Msc>etnROIT)ssQk} z1TXXAwr23g-_Z!Y)6lng&BUK+~SiECh*oHQ3N+n1tud|XqWrvQ$7aPhGxxg74wub$y{3-1U z05-%w>qiedEGQSfCe?8!)nug95Ue|Fi2NvAKvOW^_nglQ0BU1VHiM!F&nHuFr*a;K z;-q`fOAE3gY+b(6^!;FPgf_kTkyp>zu#eFRa8tzSuFCOAc!A1I%_UB;An2RkYv3noj|Ga3;h2V5`{zmkh{0_6J3OE4hsK;AT=+h8X5dj7O9ecreZbiO74p!9$0j!IE zCd=D*8$E&C`mc?u?Ct#GT%S1rd`mzPjqur|Ks_evuIGy?`?bo2fon;5_S5yA?8 zng9`!e)LlH4f|f^j%K-(jvt{$nwyNxW=pX9(L_;&q&;i^(0qeV z>7Drq_Y3*5e$FSk#wi4p9V4W5no0!lTOZ)#Eeb5oes z`%Q@2#$sS`Ga*s(_M8J04`_?~Yq2HB1OO5A*is{=bL$!~>EY4s8J=d*6qfhcHILY- zr`G?Y!C~AlM5y@mlfVgEsF)wo))AIV^0r|ALF!0a^ctB-j>YU?CZn{zb}LeK)2nHf ziR$kw2Xta(3=C+q^7vS*O^%-7!G=VN;+if$1(H`QG4w^#r33KDySt+PL`}zYmK<$& z$6q?U%H>74gJKEgCWxU^c@X}W4p=nDaY=^8kUK; z#J?Td%a}#FCXw+J()Cu@BG8oDxy~vUO%kYq5B?Knco`O6%Fjs0GcbNpmZIK73`^&F zcGxKESu{Bns``5Y$L1Bxeyx1T+zaA0jT5{af}2`20*4M%w=&!p-1Nj99Vk`_mbZBQH$g2bj}4$nFNjPY!`3+nf~g z)p`wiZPWY{6*@W5#pu??U8}yZKb@k0$&mP#op>ty@Xu`o&zD`eWM^F+;ZWGjc4*#( zff~7zV?mJ_Q4yNm{^xY-B0Uzqpz^o7Q!i{)WF05gZ(yrFR8%g7pNpNuw z!xd6H?wvJakNNgF@zm}i4GPQGciO{q{GM9$U@u)N=hM!Eve!hl&^V0qIkiOB{*NCX zZ=qCQCpWbgr5SC*bQ2(GgGM zMEmKPNjSSq`9dl8b;7EOkDIbcU!V-00s4)6X1Qf5REskM<61Cl3P{ z8D?|$+=X`X{HHM4iK4Z4TSt{X%)WzWrgSBuJ_wSdF7KxgHVcGwX)FyaNM+<6mOtNJ zwxGG~?BgejLZb{FdM6J#C8KIeDl-Kvc7#4si=f93Poq*bDVVM2D=H1yCa56H|QOEH^GS_MjCEz7W0 z#43}aJwG3 zXZMKOYIiYA=mjY4OSe1Dw09l}SEg&`Hzn@L$W1!VzRSea|CpQ-OJ0#|>3A)%H6Rj4 zh>(h;Yjn1&NQOxrw`&mz@z7A7cEbNh0Woyl14(w?fy}L2U ztpeHawcz``gvTuj)^swtVA$l_XF!8DUg5F1%p`JHJh}FCtFGX4f#}XRlm#ry+{5{f zdx6j7a1X={ngdcRP{pBiW=!7*PMk~lT3mNb?_K_bcKeJvTUnHZ0pLM}xf>#)>cAW|_@nNuRo9=Yz8t;!#Ft26 zT6Xi_bUud`E6NKvsjd|5-d*(1zU0Nu{}BCL_m%uMhNqX0fT5;-izPyg2m45l0j~qH zNc^?#8h#}hS*izgB-4Jg*|bq0`x-c)DS 
zqCGLj+|lCE19o6*)S#nNzoC8t&H};1=}4~Ufj-ahbPk7)WBk3%*;OzVeM1(s1sn)_ zG6R?;g$gFRBqA=}S$6+3^=*u*RWAhY>YM_PRv)8i3hRLy%K zHB&}zvt6HlOin_;?q!d$@b4w)-g znO|z*-WGN_xY2wJ-PP35_Hhs9goDu}iwQ3kes}Pd<@uw`K`+jO?depT_fvF0euX%9 zz!NcThYx~vi0mGsSR5f%JO~>ygIu#m=44@=q83mUgrYiaRjNQpzj;~}I238wo-7dr z%ca2PTJWG*N?$VZK&s*dLT1g=s=?-!+=wX^$nygM^^?4)*7&EV+W|=1Qxf*jQ*SaP z1-8k8A-x z@L{`QCKqp$Aw9n!(OoYP^iRYLc5gZP)oZ5?3qI{hJ$xoV~| z!|2%HjPh>22LnPU=ex--hyafzRG8BY!zbrx*fbCKjl{k!(VHyrr!eHTB*8-@*QyK~ z)0BfAvb?uAIRrUvt4rn#g7r`3ZB#|*n_&DXB2~6leM#D8nAOeVaPxPx|4w%m^J70k zq}&~hY=P!|;g1L?)~*ltPg0xJ>s6}eC-2oUZPt5PbsO;#eE86X#Ow+_O|_*N52TARR=TyUBZI+RoDZP99bIeBrC5C&U-vgALgdSR_Y)?zPRrm*0QlY_|e=~_t(rwAKn6^qrJQ) z8ed&!RvvSm02`OmU+=>C#3A~LsM>qzOh1}7;OtkQV{eyJ~u8+$`xR+#SGC+F|gk_21ww0)eXG`B)AvO(C_8%3(Hz2A+>ar-+!aAArL@1vbaPTf+^g%XBb(;35yGYlo3FFze zuO(|J{>T8Cyctk^7sj78>EAa_HTrnoDAdUBO|b3XaIh{J(-?~WDZK14^!azgrUZQ~(>(szt3B6= zOX@9I{gFzV(fkyv;&J5GQ1awY4{lb>f)U}^NWo&wyQN$RjrH%Ix@qJEy-B*-)6A}) z@JOaBy}3HKndY~a8h@4$#uiqFn&s~qtKP>|KF4D255Rq6oEuKyg{ds!ZrR2AdgMGs zOUNyC?6?F&*ZG019W~`}9iK5zc^)hGq~3=k;Hn!tvY)Ii4^=vldL;zL!+fd&inJ!~TSGY=KO54HSK!4o_ef zX_uC7?0y1Y`_UxmOFe3H_wct4Fk1KpE;rDd>?YA`z;?K|ETftC#)kwLXN6w>@YGg` zwA>eS`Fko}ILx#5s{k;|#>7>6&AdDkz`C#6*-@7(&-j#`;QG&9wYlAYT=WDwQW$5$ z!}8Y8`>t+J9tObsHTD?74!!^eqEaU>V7KFvRWfn?&%gqdcQh(}!~4MlOk=|!Tk=?# z+VoEYlSuVUf(;!&;1p?xUoN*^nKCOFuAfSvQ6otpZTzQUpzZ*v8pzlWe-T&jc&K1e z=JT7tOfMEj{!biX@hMb(|I4yA0(b$|q@?B8l&~svugmXqeXY11L^8UA!fiA72AYIh?E01#M>tz z=evorcon$4$u$fEpDAsq%PHav+G4E%Nj5MA5DLBQ5wAGQ!Tm=KlstpyKYTP&7dF5I z>VKOBZ2li)Mu(K>BTnEH%oK?YezvNY@}-Mc12N1~!U!Rya;!eNW1<2ArdILVz$vGl z8DB+vdh9cEB(aDCiC~@q)mVkH?-@p@uSeN1F z5*4CBgJ)En3srTGZ|owHXKKp;WH7KaQ+mkw2dsQ8Eo9@m7q?Al zfo}t!U}A?#B}+%e&7ZmM{bw*?b7D&W042;;ze&mJs??7!^ig1lz2WvI)(|4JPDQHj zw|MPu>uF7SfVTTb!qh;kM2;eq5_R@;MR>YQX_{ zkVJ>~O%-lwbr4ngVZ!vPwnD4vZH~t#$K}u@h$L?uYAXG=yw*W?Qn3A1o!=LSrpONd z$6!QRe?s=}ti;LKUlD`cwNLXfAg*9{!w#W;Ft6LGo`<$C$2sQP4dy)bz!orLYZ-cQ z=&^8H*e`d0=?%xZHUbPlHzf?u(!y}u@2bhkK7N&%3unfX+&z_>P?>%RtqQs}l9O`T zwXxgkrd^2F6UmsmsQmIjzj=An+_#}J|0Ha{xP{A>njgJfTC&F?4sw+TfqKCx9>Lhe zX@A)LVSuTa(~FoNu#Q9aN3PCy1)hEyV1jhuWr@VX8e!pAo;c`1Cmf1XoB2ZpnEL>y~GIGaT9)Uvq}79AHv$0qx%$i<*kr zJXK_I#k!|TwbY*F$W$39{ud+Lr|WwCTF>TA55Sd`7hJHjtTG9CUJ31(${Tt6KvTV_ ztn_0lD}Ar|u71wX<N%;tWG{%3$z6JKx)1-T%1oZXZIGBcR+!&1R5hmQ zipLm)n-NGVPAn36i#pi=lD+(CFYB&KOYC66@!a|~y-b>a)wk*sL%q!!y>z#eMgZ_M zw?v_7&^jgbncJKn%v%M8CUv?EsW&oE#m z7>SBytWJvwK^D)?`usj05zq|1D;&x&~sB38;X%ik&5rAkcyiAa4N^q{x1iE3#@=g zNeL+Wmk9~32y(aKi#rjovdJPrBc_7jpSQ@D_6#=cf2#`AY0A)iMjpBM5iGR>k11Zf zHI^;y?{C<5wdxySa%PW5UJ`2nz%_Ub*+nMAz-D*#@g>efrlWSyRlpf|y-?G^|9MKM z@15$0`z9QH|NaN@4KLUEA%~#Pt|ip(lV)CuIG}=m_3!Fv-X#ZiVo;S|`L!DFbQF&h zJ#=c!yRnW_5V*-Xfd^;_bAzAV*n&>1cH`S}uOT^KUT7Xv;+{W8fJnDVfP6*lWVe+? 
zPd&a+51uCwU3jpRU_hNgu>Ie%Axg4y$dhfZAdx$txv1B5%pZ!|k^yh9wCS>NOysPr zeTVX)LizMs)Q5_@=jYTnjf0>xk-g%!c!olL;7sg{j|qViFbs^LLZ96@85q)EI$mY1 zAjprI9<$To`CG9eMxDk^HJ0oTt1>1*Ktwv|`k9t)0gI4c$QwGjZ3F9E+bf7(g~Mzz zadDMjP&bH`hddmUl7FIg%WnF@xez};y%=&tsw`>ydzDojR14f7TiDg?n$E3DZbuFt z$G5pkZF8FBt{|vtqW16srwAmf8ojP1MoW=WplqI2bC&a{WZ?7;<2g03NZXstuQVZ| zco!`p?6>!NMjmv2Ef8d1@Lc`c+DPt@h`(lR(x~r-wuOp8?F)(wi&`9ZtD&Y1nrMYz zUl2!u!27OVkwzlPD2Gpr&Yp9s#tQkNf(IrgI}qUF)dW@%C;)eClw)vdLQU8i=x1!MM0d+?pJDY`pqmOF-o5y3_azVFP}dr%ua=7?(-FEMFsvPB9 z&6m-gul;0gw;W3UND2B5m7ltdh7`m-jv8RX5`c3@Xx^ELqC4cq^QS7G`6uU=H1t>l z^Y${FUts`pOo|v_V)~mKz7}OLOLET)^e4@`y{AxBf1b)3hl`dDaHTh+-qits6ZcPS z*fQS^{GP-&Uf|oug^@EmKqUNV(QpyB};&@(N`YZYmUNw68Y zYmBZOS)&91+b}a#IA7ktv#w`CeqhNgUVAdq=!IA7F;Teu5ucARNYfNNw-2Z+uI2pV zlhVMQh~_?6Y7W@?*wv*A04o+}ZKBWZ`)w0Q7Vn7^VI0Pu8C?`NdAJZ&`5?Wf*-=#@5)F6!2#d;CFi}IPUvO zZ!_En8_sYgN&1yl_{wY!0xX*BzqIE~_}!EI$(hqWk&DK8;$k@W3=deGZAqpT6tr}) z8($9z=>M2P4sJIZ=UlWAZ_}Z1D1Gwo?^u$wLHqROIaPeXU-0}T%e^?%=9|{i?LW3S zE>)?n*z&Yx9KhDfROMTBfKedK3$tm>uQsq(;o$w3{zj2$bDv}+850JqxU|4dz6Pc7 zpY_;iJ0DCN4>oW82xkqF-;4tQY56{CzF1h(2D;_b4~*j=1pxpYv0(swN^$^1f|q7yNgN>?cw0Ps*k@3yA(qde;d^5!27 zfG3Ni8a}Q4C$XqEm;3k;vu+NmrMlq0wsQ;YJAibEb@q`;Gy4=)zeYKZ#sQpCf6TIB z-}_0ozh1xe1buh-9~&srt~rT*Zykn7qhi(!X*ZXA<7f<$s!Lo@ouKuJZ-}z9Kvx7P z(Aa{fS9R$ilcr|vTGX01GdR?)acsChIcdhIYWza$1~C7|%NH8G$vz|tA(tJa%5phJ zGGN*Fz}ejOWMd$8{UA44r>Pw58IszC$RvocIh5kp16(NgZSH4@U;W7%zsM=H+S07#{}$E zp0!LEWJ_S#iq8%;CkIREnXWwV09jz-wenM?PxwDK`#Fy{pfA`qsc2Q8YR|6}*u3f- zwho8@T?2TTKE_Z9`;W#-%w#%bDwk*jO-;?d9Rw1f9A+vbEWbRN&}l zJ9ipUUP_NQ^a3rwXNY-aGu!!og3piP>aY)xRv^g8e|gCy3zJUA*a) zBH#cQRNqwb+D34lt_7rtsypgj zpjN9=P4eoIwXBU;Heg7)Qh?7GGxbt}X-8-nW(}94%jL_iRJy9ie!dv)(OVTqql+ zguA?o4T?69_1^KS^*sDFIMS_h;qn$G=tj?kN7%$^4{s)bqr{2!gcD&uJD`+-=B!<^ z0J2Jc8P67>4qMP&&P);f*A1UsMlha0R-vb@yK^~Q{lzCJ!^!@?jYR<+hw=L$S54hZ zzMU7C-ton{xYdJw@^bp9DnMH1{?|Hl=p#cpu=9Z>zaMI96Oewa_O;Fmy8fJ@7|W=> z<6E8XqCrf1O=3oz6OA zD1p zxxBUA<4U(c08Cn0VuTL%eDjS`MZo>Z|1uwCv)Y^f<_*@lfdZ)b84nu}Pgkp_Kw!;D zfXTPVD4Vse-#^Rgy(-CpX&ubFD#@qU*TToI=>fCUfJDYi79GjuE@7apKRLtXzN5VF z%KvU!c6^VP4no0l12aq2H=A8nMk0*YApetol+D)LQu?Uv2=Hf_8qksHbx`z3JYY*| zhWeYEi#UD#m&#j!rawHtzH4e`sfP%h2RD|1!Scr{`r4Zw$O0_Lda?X)Pf(r)9ZWQv zhLsMVRa!Fzp!oqF!fkv7sgeWy?_PW$(r>(ALE&VDFA#Y+-q_#74$r^FPNz+S&rkFK z2T+^J_vbdcf`C6W{30M2a_zcjCV0d2sWxU$hjQ2Iha-G232f8EIGwp-|HJB8Jk03d zLAR(e5y0^r+WE8LJZd1`R7Cg@4)8}4vqwrnacFwHGxXvE$4A@^xr+-z6PEbkXBG4> z+n8??(`Au8eUBgf0G}UDQuNR-w?;QO&hk!R5f0F=f+J1Ga4K#tbQy%54zS9xqoe~xbdH{i70_Jy|{9^k*y zx+|ROzz6`z4QCCTcmROB1O96E|J^5JYg)!uM~j+pv2~^TzG(B19ZR%KqGnH<#0E)_ zHhz~e2ZifOzXS&{#r>x-cJ0sdpXo8)*;HmR9{!3yc zW1f=APs{NdUEWJRu{ktN-E>^&WJw-~8%%09^|IgpLbBCMUnaEWs#d6>WCfj!>BGk7 z9&TLNi{d|YE#fuN`yk=jnGiBpF31@5HfMa0X(L<1G%dauXp^*w9bnW@pDw#L9gO=n z!Q@Lc-v1R{KGxzfM)8i$>8a9FQl;=vfjGQSyF^K3Jl+H{a6c|5*A5bGiaf&VF#qmX zsrlXV!iLuL;is46HL$^|s@%60?SVuBsY$O^+J-ZONg7_w9sOm_VW_jjvitQ~KK!Kq zYBxx1hcR8P>ffp0=(U8RK z*9YrQ##71VBYT3=9%a84Q{QUb&mVcTZFiAcR`lh!i+j1%#HHugvGdKce`C9*dPllM z`0}sl`G2w?{EI$!R@sv=;E#@P8}gm-K1ABR`(p5L=AJ6FV`ZMTr>6&^8G?fm+oF0P zeftQCB;&c4RMBBT)f>ASd>(%%AgITULxd(nWh%z>4MEt;8rQ@aSRq*;CmQwg^k{!e zugELJU{`E`?axBbmeu8h+fBc0Suz(wQ0NYpfbUNwncWv>RN8cpU%Zybn89Z+*$Jjj zU7v8HckgwZVLIF5rhfkW?C&5u)cd=1t2>_lBg0>ziK$JE&Fp5jufIfD5dzQ-E7f{@ z`obm(l8W=V=YcKA*-9p42@~-g66%AEQNT<;8>EKA;e?cBC~gXiya7U%P}g=L_&BN8 z&;T4R%%D*gdiEC+koMrFQfI{(z^f6d`D9$=|g!diCma`1;^Uo>wcjLr2z4zDeb4+FA1*1EkdV~og5Q7C1 zq@)`bxXr3op?^#6;3``iUpfdP)E0l#?L2>0M@w&(qUPbZ<)8a&vd>p8L1@!we|^-I z<80tr#r?Z^TeF{0s1J)pr9Fz#IvYzLw#8r{c!KjoFCF1;HrU0iq*(uEIQKpZx;-&C zAapz5=iT@nVi|Blu%;G>ZgN{L_xnrnY={PlM3Uj+z_r)L{W70knvxRBm@3H2W3phm 
z;uuqWK{XCh7j@!ySW0WCn^cv%nV-_(7Pq#|7Aq?&nD6o>sbQb}ch1Dm929{7Dl1)x0el$2O@6_HIvc z4w(dhtuK?9he%Bi37)TcB7|SxI%ZE~p;i5LGCph|YM=gn7a>HOGD*Td(5~Uy%Jq(VvVSBg?y`7HS!HsbVnR}vx+HIl8<8}GHpmD~ z{PeWpmbl!=se%_k4B!Ln$E3UnDX3dfhdzTp0K0X95l3>RUjF10BPCCzr+tLmiEe)W zW5mvLGR`NDWeXbrIQKFQ}U*MyNIr< zYyxf)hDXW2Jge>{Fgms6v?bbQ_bEG!T_>)aDH$n5@Y^v;)u@v0R$Vt~~9ii3_Pq=$^(oU5r{s`DAD& zS-HI9P^@9>#A5*R?!-ztS+sFewHax>LhL&;;othOs7q@5C#Np(bW85EgRSt@tW=0Y ze&C+Ps*v`$O0h5PJcO=;i}VT`m^Y@obZcc2*IxbD!xl&q zDXA-60iUi^=hW8t%R9d_*^3AV1m0%}5;%id2?wAU1hA>amy6hD#B{sqCAHbyBeZ&* z9R-WWkNms}k&SzYnfA}o^*N~jF99+B&bDKmsCrb%qY%YIi{l{m6Ab>q0`VLzzOY#* zX3MNmq}@BOZ1Ph1=h7S@?FLxP8VugLqcK#>4Y#n{*s<4&!dC%i;S_W{@H-O#^T81? zI+y2=bfOI)dRCa;3NY7Zpr$xZlC*qF@mUhgAM^=~aJc=f#|UcVR17lMX}nNjnY^BF z!hkVBL2!bykmE5m7!)^+OBYT9f7Lcv1KYn^_()=@(cEmr$`i6@5R|T3$x$c|#PNV( z09!_3@qw%MMZN_DavEzNMK1&#aL=e!bsG#R3sew9?OYT55b1J+1C|R-EmaL}JAMOG)kXJx2uj$J@fVjusn zgOCdihsfYng?+yIfvEK)9 zYM(8}N)F}7-r*hgsYklsAjC|Kspkpq#3(ebIz(WWVt9a9#t`87(&{42 zG+cBL;)9#Bv*@8%8zF)k8l9}?(T~o2ryh_jO_L?8GS&MK@@t;46zd3hHNQuazjckN zTs+&|f(bn=5B2iFey8&=ClW@tPlUiV- zlLdh&fjkE=Vj5EalSDo3Eql@X$@i?^t5Ct%w|d;v>WWSdw6$DC6iQ`fnH&v-1Ob2#{3%R3GRg2RfhoPy%-*VISk3G4{)34!x;exA+DU9Ju+|39cqmp zsGtvHys#@lBl%U`I;yL57&4zkDns~N*Hg+H`*oxS<@vciP_BxkxFNK;mGKawg=G1j zeO_cJsCnjyVP6nT_cPaW1!=Oa&v&w`914^QDL_eyr+=y`MinXG)m?%Ca%y=*V3J@> z`i@rM!axz0N(P;fjZPYo&W!M&74o=^3drZ@#lya%1@yt=1rd{q6Wz8lnS?WiG+2Oy zWk~XC7OGel5zfgBn}plu;({5{CuO%}TEL~UXP9Niv(rgJs@S*Mvq)?f)vZClAU}?9 zV4oVfl3VLMF~yxwxT=$w2LMCBW^^OYpdU?$|GzjpJJ$ju#bqOun}Qkk!5-*9Ct+xG z>reNcJk_D5-mxD?V|-Ic6AsQY>@u2!Q$h;-1SY&%&{vZq(cO0r-S`ncn<{QVg`>BB z8tJrLSGF96in>GbVny1a;;vd<_Kb1K;QSr&-4dVnQ6ck{ls<|=wB>>Rck5fQ`1X6S^>HUqy z6Mm}sb&8y1RViRUA7ZF%h#2P5IF7ld%0aTP(33_ep0^eR4pikM40aa%qE_-7)qPCsWJUf zQOJ>@O3x?f0S#aXl2(=xi!vbs!PW9}?tKDLNai zWf}=_LMXDU2^UF|w~jk9^+*xJJT>Ml1PjrW@O=Bi*t}d>l8ijXtq3{b#jlPkVD7P_ zIRLX8Ud|+3O-LuL2oB>RL|gI>8y6a^!Oy#96{o?t*(BBu4oOLt#%C3q$wqT>?B89W zzyqp23^`k7+K)MfqkFMrT))d!*FlKa-6i%~>sh_evzEv;lYMGTKU7STN#bA(56Jg; zz%X_bz=#)O8V+Qx1HeNFL!x?2n&W&GjF0I6T8fQAK&H#kGK9mUrr#+S4TkUMTSy66 zZ)ts7zwN?!I)yx=5*Lv)1sISv>n`B!MF5ZNz?ZtUF?AFnBwG>E4mb-Inh3$hU}3IF zGlXZhLJ2Q$N}VG)Dze3^b~Qg*{ABS1VC#%v69|8b=!KjmEfT1UkTDwR4LxwU%#tVpx96|`1XzUw>{_^7u zXmJX@LXwlvI6t=c@>55U5(S^2e=z9l{f$p8EfD60PvK}LAs^w_K(fdV5EwS=E;#@8 zF#uq35l+ChF$u|K0_yH?@Jf>pRSzBCNc)5{`Tl)`X#)*bOpec{KS@RTpe>5jKH#>J}S&bDgg?v zJ}D#DX*ypv&S|9Wm-Be#BLKj~i}+-C7g(&=7?dxXvj(^>{`Z>zfQ1D(!A?W6C zcNSi4cRphldB*x@9*lMhBRMh(r&P%$m?yX>WG)BadjkL-`SQ;_27nUvfMEo&w$BZf z4X$5VxPTRIU%rgrdvGkgf^X0#{pk<@(fKApN$zt{@nN{^Ll6DF>Bde8of{v6^0`O} z+VAi;7*@{KCfJY4svXq;Mm}eCzNR(@#!t1!@|5$IezWAl)w8(6^LsCFX-o%abi41u zodMIHM@5i!)xT(Au3S|o&Y$0qLY_KJ@>%=MCJK|J4^9<7R2Y8;clbWQr7=qxXf{>L zMYH`f`l9o)zzRbAfFQ*@(<;L+eZhe`!>+lbnT1QPPv-1DCDp~wOINYN)vN#c@Dq3x znsuf&`hGpw*EG7K`^x{Z5=c7Ood;MkzGloFU2^{A8dkV=?ZPN9CU;cJ=Z5&hAgTyV zNSv(|g-N6w^?w75v4r~Yz1J>bg&Q|6;fXC@1jAvlA9FP@ZofmP37x%rsGn6b0@9PNnczf0N z3^P`lO{=mu=hb#<+>QB*>)^3N{;5C~_S&gba!KY1d2jVH{`q~Z@b=rA4>*q{>;c0F za~m<3q;P=zHo*MjV;GA(|6lRJ&M;ygbwN^mG+gGWZ+~NAyXDK0?WKSl{+3Xq!Tyr; zlLMtcFvV03GiDC_GQfy$>0ga^&H~KY?$X!~jqv7CFf+1Glt8*w|8X00_1RpLg+KrV zoX#w!uzM3$k=_{+$9c4xOJ&m5?KuLoQW!Je!LAgleYvTN@BIfD@pr$wxFhNg^>Fv< zIP&>DpMtrkj^>6}VdufZ>`waZ^42>qgSq-Qzl}$AI=KmwzaDIUU(v6n@=B3B+#k&eg zPM?LNrwNRBPCq(4*=jbWp`krIdFRVID=vg4@6Ks}xd`l%ssxKrktQV{V4N2L<_wp{ z(#XiuP7%xisu`--f+-8v7J@{kbwQ~{6qr<;r69cq{n{_2^Kt>&>g zF!$KeI#nABFH6Et>vY0}G4t{}Z|QV0k3Ks)TS)!NlLv${4Zs{rZ6)-esxW5Gy}M*H z_~It37)Jw<_6E9a5_7Ui>_NipoRIg;)Fsr1ox+&;;SVm|RY*E3RB?~lqKe6kdKcoT zBOoJAHtt+@5n{96Hx^GL#6{<$=zSE+sf7yj%Wbc}h!AHS?XSFC0wdg`05iWV%KZ3d`e2J(M 
zYK*c)U9BRqG!)6UoJ2g@BZ;KMgeU5Bq2MIEeLM4Z_jc!gkoWmhhZT2c=DqiwH#2V{ z#EBztar<>t^Lpju2o})kwSdH5pc+|>Yv5q^??AZfc99qYgS=F7-7P|p6IDM?NM=bi z%#-x|0dbNKAsT$hC3?=xUoP3aF;$EfGk(5%7f9huUC_G*!oQynAnCpX4<{u)MM{aJ ziQyVkXr`pf?wC|UHN~=Y?Sa>Q*Z+WqovNo^Mhwst5N=xE2jO23MwiKnF)%dD(CEYC z{)FC-2%WtNzbXAT~WHOaW!~^}%d20fM3HqTNvYf{nRA7l-#E-Px zL!v^%Nz8m6WZg_Hp)mJPAYAxlp_X}q1O9A&S>u!W&N;47sZ}HHRjD7cNmt7Fp2v$% zGe@>*xE_a5v~ThgPk?%tWGcvfN0V7f zx8t>dgiW1k7??htxd;yvATP*Z6|jb5dqR{$erDjF^xA$=U)Zq%W7sFCo-_297bO=T zGbW1}Z+qD#jA{Gsb)}5|8-zQWWOoWx+=7P^q?B`WlW%-$bab?2Vz|bn)+jFuRa@~n zK-wO+uY%%CQLz-A=iHw3@8RL?>EM;wcqd?#ucV2C(g=*p@4&;q&AAVo*K!YD znH^5ZgJF!d6?+_Olq_g@*_AT>E_WA;c#fY#7hwwC%e^}V!je^em|vSRE`xBvW$bD0 zV@dS15ghpqD##5eIfHS6N|nkr5Uze4s|6$w4^-Iz=i%j@c_7$gt~^QgoSPH$X9n)c z71s8#0%Pzu50)&LAlLl$ZHTUjpSrG82jnebt*EDf4pfs_g7Mp#TO0hGyVl(k2Tl@N!ez zCV8^hO?Xtd<>`BGyz$2D+$y)^DnEmZNeRYMV(R?)JMeNVMCZTCEOMDaUh;mCLB3*< zJgcr>aT#8&ET~;3&p4am>tafwaMJ9QWXYr0o zU=RvH$DZYQH8tzZX~vJ!Oqpn*(R`fvsyk);7AXI5yEY4;hG!veW$a#5y9fnWXUV=i zEmn!0$XZnHF|bdbmaJ2JQ=%@|4hR<|Oh8vCnAVXOPzwjYrsS{wXxA>f@hPvLs49lf z^m!_; z-1u84`0o3tCK>d^NLe>|cUBo6Tjkj3DtVflGywh3*?)$D@6V7A<^4X5;AXK4AbeZG z6uS=oc3K;=-TQBpF6lq#G>y+=aT&e}S%J{w@B_+WmY+pR*Z;HdFgHykwT>|uM*AM` z!NdFSD#!Ri!7O@&*ybcp@|s!zd`ocwMMP9`=HtX8w34xh>or?ISz!U2K#0pjo)4f) zAN85X*ZOnDVfH1l;eu9UD8d~4_Z0eIhEsAzP%Zvv8w6$g@AfsYi08-jiBQy$=iMi2L7`(@-Aaq_ za994SQ|_MwO5CK8ZQ%ij&4=;yJCT%}`8Z+us^GIvJQ<4x#I1~b1^oHJGXY~@fj7be zH$(&&ieM?Ce()mHly@%KP)Z4PCl4Nhnm7m#X)c#Xg~KA0{NK}|T@oEn7|Ls>HY?C0-(=_A@-$NcmPs+p z!OJ{36x;ZeF@29EW|Wg}q~|RuVl*O3r+o_*nJua;z4RKm?db z{v#;)@jL>;8DWGWV)Ns}Vmi|xmftWEh>u07{mV8nCWFW&2O+r`ARi&b=G-gUWZ)*} zhwJn_>Oda8$aB!Zr<$+9%WWd0W9Ftt9DPKZ-+(wWY-kqqZ+vSmybmS+k7~5_1uN`9 z>Skj8Lnyf%u=6XZJGPl+i?jk&%`xrAUxAVj7ElW-QoER3Vbm?3Hk#xOcgF3Q!u0ab zNoO>SQ19_$RjX#=@@@qHOb3L{_{~6r6<*1ij}s)MUo&n23B(Nl=67eANWzW6<(NFz z-H-zOKFsL|{;lrA5P9wHsbt;K=Tc7O5w?od?LcdMLhKkgoEsU=AUN~yMH{Z>aS)JA zq*f}A=top&v&^nQ2@!_|#F1%3yW{6Dgsx3)-1Jk%{8rZtcaXZF(9XXFCGX865YwR* zNo)CDg0#k=Vc`QPS)9|jZ=P}OfvFTstw790mk&H;v;g}A%poI8|Kr(h+tn7If**KyZmzpU>@oQQ$zwI03z_^sFv== zvx9=!*u@Lc87X4Wsee-Q2B%4wM&CVu4j+;wRqoM>HD&D#hM4{_y%$}{^*eF-#O0ga zW~O2m*-pV&K&;9*qwWfx1T>rj%M>EOI6>KH#Gj?y%J$BpLWRG=uZ0u48`ZE{*_j5OF(nkV^||*R-K+ zn4%?(sa`BfdeNp;$deeZx(TKL18^o3!SNK9Bshe4?C{YT zQnD2^3L9y;FhM&l(5CTF3n=?pz$Vz_;Wq1^Q-_7O zIz=X)V#afmaI@bMsAA+8;>U4bkpdh7gZ+6Jp}oPUjR9HetpVmwgy3W?a|{7q1+LFJm>HEX?k)81X%FjNBaK$5W`L z^7Y0X;76)ll?DuJdE?_k$ff z-NCiVnd&-QU5e;ZfE!!5gz;-{q26=_3U?B1yv!ND|H1M|v_qoSv|o$fuF{5q3yqDA z6U*>NgtCA~NzFE50SS*-9fmJo7kE%4_LY==iu@1=tyDrN+f0^YQk!@lK2rqqro^a1%Gc?~m*TV~_2UEd5 zbQ{|6(0S3LLBhYBsaf-60ie8b=BiOCp*HTd_L_N6C1*BH(0St)sCY6?3rGM(giIR0 zB=9LhbmT^|P_Y|Iw(ndAvqu#Jt#)8*QM#2~(x~}PxMyEh2K69+Y_v?YaS9QJYcWK5 z)~FufI(tjoWJfoYq`~0MS%~L7E~Ur`>Feh}AS&EU=WlP{ z0~HTj_XgU9S*rCOJdq||Wd#kQN$bq}kOZ(H*2M!VIn!~X$h?&i3rKhpX~Ir{{NCgO;mZpC_5rLxLouV#TrC(s;05?s9C@mRLjVB{CwG;c6M*P%rku z%h2#DLzxHQ*Gx)wDa<_GX(;L0Kxc0$TR#USyZ8l-B8?GA1eY>D1YzG3FwVATpyV?x zcVSiou!5W>>fMUk87B^EK-9mOG5t`{%$p9ilX=mLfK2+QW`-Y-c|Zyc#6V~?6(=6w zqtftO!dt+8tpy|iye3hNcnKUScg;+BGR!`lr&@byA(S{8LQ_+5uPbz#T~|BqyD(89 zN-!>t*~;YAbfn=rBaJJ2%QyMExo4H#2UZ!(<#J?VZc>Bruet~R$bY~($3ivF!XG*5 zdnwr=0lkz-g6DHEf<-LGAK5~C=Ryp(R`e+eap7}-s9mdz|vYk0zk=vi}DEcBwrAy#wscIrtS<7 z3*YXd5|$C-wkk1I3KVG6ugeF)a;k1GpeTPz={6;fp*=+#Qz)_>zb>PL{zT zgs>Y}Q~Qqe_mcZ|ZlnWvz(omPes_n@isWhOo@sZ2kutpk9GmWg!R&`VyIwcZ#Y!I4 z^gIm(cC-oR8R^=dvxZzm{rDT%GMf&y$zK~mvHZ4=XI+l`XJ{C;E4j1g{3w8$2LWZn3#Ll?wvoKMJ zn(WLZgBZGIxPFlzu6#fjFM^OMBPw~n;glsA4Q1~AAdEv_b*FOBeYU$5qhVa(WykL; z1>;uICY+6mE}$Gi$_zaL6VNz12nFeF8>{oEGDW%lT-UqWUimpeKVT2i3?kSoc^inX^lk 
zGNx_957G*RzF*F7jE|d)mG$nzUn$Xg8NH^|W8$wKs-ML}m zUx4@{Snwmdn5C@DIHD9g_YHH?GDBPFvtvPWd*+_cd+xbhS72=0zG%Hgx4<%zLUXHT zMV0qpyw04aMeiD;_Y;SKaxtc$@*p+Qx&|z;25{)K4;2~NDd7sr^8%GmLB)g26;-&L zI`91p?h-{PRL~9FZ!UwA4+)x07Ck4PL#c#LAH@Vy5E)4%lK+~q>`EhCwl~%j<8ls` zS{V~u4YgfOQVjajjov=x^=0W=K>A(+(dUYcClb%rv-UX_qTq)*9R4-H%+B@NdAcf- zu}y$kp4Q8RH>ouz=>0?uL1LVF4wW}E z*U$iuHGm^yoH|eu)6Rn^*@i&DISo#ElQ|GsZlbL(04LvTL zepR&kh&`xSp061T$Nww$B0}$Y5cz;-#@rCwm9Armp(sn)kjU+m#CdrV@QlCfXdAi< z(h#GVHAKOQ2_K{J#~|LSmh=pFA5|tpu_qwjjp=2=epCwM+QO@9PE$|eXZJTWgz5yV zsfVxq%OG;S_wla=B<`wCAH^$!r)W1YPp(SA-Lmc`B=7$N z+OFAFw(EC?;?^Ps#*xblm4Lta#~jeM?1P`8hX&3uh)7yx=4Ky~!ShDp!>pnQP-QZ` z{D=Z$ZwPvsaEsc#mI|-Nq7oRV&^!_k@k28UGf>ER!@c+Mqj(W1J-$ML@l=`)Y(pJo zbZnS0n>Ke(S2OvHE`e8F)~&i46c379)=V@oYv>rET;t9A36jjj;PwUW91#Iwl4_o+CbBmCiC^@vinN5D;+)<;bNx# z4qkhb)~ZSJ((0oaWi+rVE=6>#YxRF&EE_%y&32t^WxKKgli1~xH+kC-dJ8j}qOogR zesNIAI56Xvoh39V#pDk=XVcn55XSL;G$x|Zr0JNZDZY?sU`r|~~PWD?6+PEg}OI6gSq7X}dS8$BHlJ8c18l$pq> z44^OwP@hsap632T!quN|KjRPavw9@RyYYu;acE)*hGDDJ)Y8BKRLCfB1(JSZd`g9PqTV7SWps08x!wSoue^K7loBP3uV+Kx<=kJ;9>Rehd zV%nbcaU{We(to~XTUD9FIu3rEUjqPN&k4kMndnL#XST#A`)FUI=w#dSfm$nTA9bx^v1{K#`BaiRJ6x-hDAI(`^Y} zk_|*q;$%q&KsR}vC9Yn3X3oKXYq!R>Yo72SRo|>t^0ZT@flYt8X!zqcjcpt2un`*F zi4(;fc;bkEwRUh@_c~|+UI#wv`&nDCh93&CLZ(WO>DOJOFKBJyYo3silW z@Ebfz0S+U4nyCom0$2^@5vPy9@AmHJ#gXHR1NgVMXI3UQf#3>O@*fPrL`NSDd)WWL zYe+EUWQBN#VP@G&E}KIxLK4bECcURzq9x>%tH1)|zd#`}^DlJZg@q6qw}*CznHZ0{ z)OzaaYPEj!_j6s>xVpMty{A%lDeo;A7N4$>r$vj5@Gac+?T|FcAo0{rJ4&++m8jGA zV1V?ZhZ{Anjc&)ps&wuO{0ww6=xHd@! zu=K4+uA)i_=7KcVMrDji0^}`a{wPp%)KhCc-u&u0WYA&caGapU!xnU~o+JTw%O3=5 zNgTiujdCTn2wUTj8mn3=^i(f1v>YNpt&GvX7C|nObXE?i06LM$VZ5I_O=R@|jj<)M z6n|7&{(+4ed)CVct$W9#%mbx-R2zl`r|T%tj|{hoKS0{i=1OBp`hZdzS5~D2b75^% z{+PUy++}POi~dr`XQE^a-u#NWWYA&cxOKAqpI#ZhF0iy!NxwV#PK7NT6-aDVa!8F8 zEfsn?l=&DiMI+!Z&?{qH_B_FQB9lH=h}QlI@p}icV{Cnf$%yGIY{Q`VuSo!7Y*rU3 zOGgk#`g#eGSx{_eRfa+4boJLy*AQ7Q6UiOc35x1?0*%c3$k2}k8u2albqIf*e|jP03H& z=AIgL!KA=ap{GNUdEgQOp;{T=>Q2^wl9Ow-wC&DC)Yb>IKz2M)KJ5MrTQL#j6}VM! 
z?mOD?+0uHMh#HGyEbGJqog}N0NvH61?I)rX#-X4TUG0k;dA({q9ZW>DtLmbBVJ{It z>FlJBx>3eN+howsnjLFjF=S7Z$UfuH)`e{LLTh;Q3+h~NG=p9uEyk8~Vs&X7U1ICh zWTP+0N@7#<)AqTiM%}bPo>0@F_`Sd-0bEKaAZn|WPqM!)eiF)J62!+3_Of-%vKoX|hNcFx2oTQI5jUo5@nVtDg+ z!sTW&s0ZaYrmHl-GckKNVQ!L5fnIBg%^!u>9*5Le{Zes>bE{9{Po{1W@G`qHwuN1^ z{Wx<#1j(xq>kTshf^RpnXbPq1B)ob z;V$Yne&fL)qq95v2$d3OMH|~=@@qeHOlIRkF9W>FpSJCfg+c2P%!&?omeQao*XeMf z6;S%?*jjJoPLEx`jDqpAC(2Q-MaMxx%t5xacV5%&!_q4k89_y zBBwxeYOIaP!$cbs{qoYL>!!*vQd2VR@y|Tm^1oT?!s#3TpUi^W1^e%za-GUVb-wuy z(W3OX^tlDZ*Ze7&pA?Ec*P8cdX-aq`Xyd|)<)?Gy{-tK1R5 z%9t$YSRgToi0FVlOSolBd#LsP!@D!y@a`D4yb6H^F@Fkurj6_XFk4Tz_W~rhz{JD zf4AoIEMwYJfBMAWj**LbfbTS(CPq1DON#(j#&pXdi}*atn26{A8DtTUXBpG}h!QdB zNqymc^9AtEiI2CD;`j1K04rls@pqw#h-!qt3r+i^f3aNdK0yW<-Xj9|1|BWqvlK)` zb;V~XXwQrUdB{A<8v(40Ng*+xr63}z4Vm*<3feE>Tyo9qAa4Y)GA4y2JePuqs5T_w zxfHZ-f{6%pBC`N_NF0EbF=;cT4ZS67ytm@z@uz}3BOe% zBB~4VMUf_X;TgkV{@5+vV{{grP4XnWocRC% literal 0 HcmV?d00001 diff --git a/docs/images/cript_token_page.png b/docs/images/cript_token_page.png new file mode 100644 index 0000000000000000000000000000000000000000..a7d86d57f14f9aac37ab09f223e4db5c9a590b27 GIT binary patch literal 72761 zcmZ5{1z1!~`!}G7fP_-gQqmyZU6Rrr(%s!C3ewUIOG|f`bS}GegMhHRbi0s;bt>|04y1O${I1cXNgPmvz(JleNMd8kmG z-s-p_AYk_V`XCNCelvTxiRh{-BaTopMgl`XBwmq~6jS$F*qsxx#W5!Ne$)}|phW2q zg=x_^0o3U=%t?0`kLHe-`gmp}coR&+{aW|%<5R6*E{1x3skjXHaoL+kct$}VgRwHN zkmQ;fvo|;d2ZTV*OODskx3?!)54|BEcr0rRi=hAd7LYOu`%guOiAeP8C&Hg9 zX#Jo2|Nb4_jtO4vyd8za{CB(lhF%Ox?PF}r$ zPG$4aX~5O{V*j(FKG~ofEL2& z&}3X*(N6DnVH{wBhkoKP+)wE@X8_nwh>c2BAU*}R6b3^R>A`iv`t4kopy=gxPP|ZO zgwDvdXCX7(Jc^!az-0Sss^HCUPtkIp=JmaiM#b;{1fcY-#%6d46% ztuvFk6y27O6BO}r0x7r!9qkJ1s-SWI?&)eMsL%CGkYKfybq((W3zeL(%G;10|KwX* zA;Zx6_VW6$apDW>Z~VbCCUcAGr2_k9{zJ>!l$FruK5{HKbLPmW{XWI_T@oHeL#U3^ zgv7tu^6L^b92JVce`UyI@tVf?Ek%a7SL`!Jaed0RgiY#hR#3D)-=YJDMmO=Mf;OY;6Rfl3*3k8L! 
zF(1INez|iBLnW#G%Tv-Us1V!AUKo5YS-;;R*n`1CZS*+z`%v>Z?2_0gqeIypF;lKU zfPbuZ6Ed_>M7%@ezV%C;{uF?c`~gpEbfkZl?69}tm`sE2#+&7PR&h!ErfENQiY2q3 z4YLle^F;GA(gdc$du5&O70g0l_?i3t#Z-GXOduUcYc{uIJx|SmWu(uvft9)~j?^%9 zBSE2zH$ImYO2vf_{`@YY>piPpfR9f_XZk&0?C{^d5fIdOsOa5&2YLf{m%Wv7Jy!;f zI7#%7XP+=K_YwV zo1?BrpfaIryIsMz?8R4My!8PLCqDx<%p9Fg_<6Ev4}GwTT<70`3C3s7auEFYwy#yg zB4IvTz;q*z%eSjHz~9#5_ux;>-Q?M&jJpD~tBN0MuGKKoi~8;|R2nePScSa@ogMp# zR0<^FkP{K{Q<|C0eKQVIab2O;SjHi-Khy9W!(XkF6XD!zF@D>mLh5^_Y!>gbFi*N> zD{-Bi-@dtayi~SgiUoMF=bSJ(JDz+V5OjBQr`30jrI5}Y1@plTA9d`wv;F_*nQgPL zV-ER2hr?Hp4tl}&IaI^TyBrj|u~91}Q=y%A!iRM2=mpo;St{vbeHP~BygoJlXn7`% zJNXue=e{_i^`OCjM%nwo?v7N+c`|_lw_JoV(;!}u(R}Q!VzsciB1dEQ zDS*K_&S}BvbUPscXt5KH$Cy$9*=uZk+~~W1`Od_2<7$ib#eXsasLVlmWXMTMpiO_| zFo7H4!I?}Vr1;?7?mtZLcSR{??hG5bq<7Ko>70~L;U-v15lJC#7bpyO!i z;lj@V%{5azq+~G+-JcvnW#Mvl$x2>Ptk{6FyEJkB*n!Hh6kZxZx1|XfPFhKg&=N(1 zdZSU=;dw^Ytc-BGiRyj%2qeG|iy)-~PVmb+|0yy3YJsK~>l2zRi;jtn3kh0Y ziGxFQ(rTXPE_{^f%nc||QGbue+(8cR_|W*-C?ZmJkGZd4$aT5V>3Rl1DCwlEy1>lv zYDV=@AENV#L_#{%YzL>u#_l+J=$*q^3N3Dg1lFLbkca;O!g2t)TSfY0)9#f)1Xk;g z@a{N5W_qb*OG>+MJb%D(1gLXn=Oh(H5q=bIL??=3ITS~O5{`B55QK@@=h-MP8T8~| zqJu)?$n34S$&bP7R&R&}?rOeTaer*}FXafAsW$VaSC^)_QgVn3j)_JMgD_~`l^qb9 z4>{*0vkUt$cnaW;3-IPLx9S4Sci7!!fV)huMV!rMb2o}@&ky`c8O$Z%4R|Okh7M!% z*JoUyv*0lE*!U^e&?HIr{w6}ZeqwRO0D!Ondf2SWTmVnqSl^~<*txcQ%cYC10v0lK z?1R_w&mjy9MN$#hty2LN6_w@HtkH!_mdbnWQ0^`$I$|12Gy=wd6pP%STQ)YU*FDJN z5h2ZoJjHFcf-uePF}V`Z-n-Z8btqQ#psdY$%ZLHV=c>uC` zKi1RE94WM~OPVzCp%bmtZ4QUMsqEly-Du7IvDzV&1@kFX%JgZf+s#4ZQRI2Il6R(% zEu0DS(X7yi&u5DSG9k8H_c0lD_)X-?D$GjgeRR8V^oW6RW{Rl2<-eKZx&ISI{TkvA zV|cn_Q68X7p4M&eCT1UIU?B^Iq`?Tc77v<3wma4*Qb1#{n{ zBGWwDfYWyOy^PAlGX5QFR$6JI4wo$Y@M@ZVhj)$#EM=FEVrb(Ykbbh8GuYK{AJ6D9 zc7CWEBh=}CVJYP7?DOEmgeyM-ZR>tEDeze{#(?8|C`CPM2R@6(4!H?1Xecc%`(inV zx)(XZhJnP5q~~ePt`IgXqT2Z@uSae?jLjZrx9hhk4+1v>6o~Oi=filx@$Za%ca#h} zzG>dT6ISyn)!Ay4D;BtBdNB+Zj$jJXK*KMNiTBolb!N#9GCrb7)3}UO)v{n}bsst& zGF4~}3v@~H8BqnvCKIrbTCd>1-WpXJ2x`J7PWmFG3qFbX9s2eGv(C^ zMJlEBLim8-l9G~Cl&DE(qJIMLUjt}qG(R8Llu`MyVktjtUOHtv_NogASd2jAot)jPgPcX`GfAM)4nTTg#Oqqv%atfOgdsKj;*NA<7?8?2x3}qpS@{- z;CN}?s*o13Un9ywX^&x#Tbg1z_k~l?YnU~mLeV(v_@CV17r_AH2Nw)09r<`65K(yg z-UpvqwJU?nFP4FW%W9kfr;M=69w4P84bAATTTP)?-!kM$PN^5zmOD43n_EgDSBtOd ztuvh$AY@w_d)srBtR&4AlJdN8Pc5M`G=pV+#~C-QD55v*b|vk5*rFk{;lTAs3Kw9Y z?@%kX_B9U|1xZ73!iAAy zxrr5q#rCvW+NzfFB6nhtRW9qMz1`5Fk8J&Tn|XiEtVDDrk=?sz%z6U!=CGz8W5g_) zdls(CZP5YwI9SQECexfc(?C0=TB&8LVd3J+{Lg1DL9bQAm^e8T4DYW>51<2CSYS60 z*U1Jd7S;hAVR`14zCM5c{3-YslYbX*l)tqlD^YYq5#*VOxs`m{tOA66oU-7V;zz^< z%UCrZhx(x#O}=T!4<)eVPu%ldp|qw%2$n44&v6ad2!q-dUm|1Lg}ozR)vQ5VkxpZ= zX2bVXPFK(Z+OYa(H9q4Lvw5bSFl;!lx(w+LDm!M9b%MPql0HH9s%r+SHhFy~I=8B9Q|n zBSD7cr%R+=QJKpw9=RzyB13m(?;O{A!$xCh85l|+kLp0asW8q~&jYc)JZiQVQVs_Q zH0(D+e!f(t;(6DoS*!3yUXUC0w;00}9{6jI7OVEOLE(c&1othlw7b!0<)UL`tH1KC zuc_ZcT=gOZDm%f&Gnn;$uWFPOVooF?QE8o~?*n9&zhoA08vD>dlo8vT& zk=Lvh+z}U^E#lq{)SG4x>f&#?ZzCJBy703mznj;s+HBHxk6^BXxz^IRjsxsC&DDZC zlruE~RmZh6<*nMx~<%%S_~)99~d;x^dMgCrn-&>_cJz7{!J+50A2&XM!BEG z57}VN&#Tllh7!`>i%&&E8qA))E*@Yy(w-<5i{K7>AB%>c8WGK%xR_G!GT#laWcKit zL<0xjLqs}m_ak;XB4~7~VT?WONuLI{Fk*p(`8xexq;N7rs=4})LRvrT9Q5#~Et93D zO~$O*EO^vMSkxI=Gva2m)Mqpqwo6^Cj625=6oHMz5%}%G{#R~=5#u{2u$l+f#w3uM zlX0@Kr9%z=wgs0F@n~` zFgM@;8G!uDerI73X-H@&he2f5d-1|WJDpCk>o}@>_LpiI?YQ<)}*!R}MexAtS*kJYwVD<8$HO`@N zr@HL`uQ0fm`;GSSNn*Hd>XTS5wI7}4y|a-cGQjxDuLT@ad`br4UVGIitHF0ukr*!) 
zej1i4aLO)i;l@x*+YTP4&~&xu=smM`V@b((r=8%D0%8#J_+0+@dUu<>C*t%SJvc;A zgxU=JBZrNR?Lyf0`na@nsWY%~Z*1B%5v4(aJn;qB{pnE~n?yDtd@8Wgmw8uxrQymn z3^HP}BiE26(sZW3)^Z}1zJt|2Ln>P#&{Wi51({#EG}@DRO75|~-ar^-(V|O^3SMj+ zm@N3q#r>cykRkRf8u8b!$ex9JmCt6;ILSRkMEKT0D(h_dtqR5JxyYuvYVL9;#hj+& z(Haqk80hGxFD*_FzZwTWIrrTPygu`7F}g-;^IhzS>BZh-*HSQ`Pu1#mJ*TbrF8EQ7i+dIp0T#jk=fK`i9M_HcGEF5 zm1cyNxUsz8GleLJL-NZub)%=?$wEcfIa?_IUvA^jbyr@%cU*O^)_Mtly1+(O!d%rK zp4F0f4_e7Yk@6;UBDt^3BWb-}IcPnozwT&7k6^6Q5qnI{COcgi+l!u$^g{WF(y;gX z?et^hK2D@UmAAMmZF%d6Szgq+udO1FY}yu-a7xQnd~fDMH*{y`0X8w zaLjC;Z3*>Zoxv8BTVEc!0m*sWE`?`DCs*EkNu$`no}7HxdhW+l*HD`zVD@gT-FP^& zc+E?FZBN>Cyf<1{`zq=9tv-_7Ax4mLCHdmrRa5^p+?uL;5Q3tEFkpT4qY@b(`+XpV zX#72jf$r5xb0y1H*1vthKwat9Xwoqd;(5Qd6LK8kn`~5OPtT-8Q`bU4}@uKl4`T8emnhPd%$a3X)}yhXY-X4u^y$Fu(Cr6p-cr0pNq}; z4;Zes3{O0bj&1Db=O}BcEXe^{l_g|cHg0N@^e(r!3$`}fwpbt|HG|0t#I!QnZzaPq z+zv|$4T9wNyS*{1L4Rd3F$c%<>8^nq)9M+4e&gSQT~9DsUp83YlwQBi&(h{Iy+|S0 z4!P1QxN_NOed3ozS1B;O6aIx$@Og-F2}|-sw>6L33WckZz|M?z<(P~BO(%VSO1JA1 zo=T_FY9YN<(hRn~)xFFB)}8`ihg`Rw1MRxitJAE)jm zHnQ;Ct=VnlS4$X}ytD?BwMxC`e;(_n?e23?Z~Y-{Hn$Pdx#EMg#(@crAV?N|lT%i< z?s#!OU$N&tkw5eywEq;w@s}9^P?a$7FTJrezP;3nW*HXIVJj#uJDjVL9T3TCZ=|yf zkY;Th5Ob-O(BjglkMGt8o`kA}DR-XtRzWR^!_ z$CF%Tzj-{r9Jk_TRcol&?7KOa$vE(ibZLSE8_yc+f?hKUoeEL6?#@o>o#ozl=mO$h zlq2x9yiY&kRX1Gz!}~nP)d%VBtiD~?Np!40{uq_%-unCxGVm*u1N88DZpYNBGTLyw zASNInVWH<$)pz=4w)^38v%6!#nf#ZpcuD3=xTO6qnsHf~A=^$2l$Mi^!%fRc2pFm~ z!|G+*G;sACUc|CbPE&Yg+T{lyZI3bv-JJMbWUV`6;n}ClTQ0S(obL7Ijf7iDzfF2M zEhmipS~6E+1zME6VcIX5;-@BqwUAYgXQJbZ@A0g>BG>Gagc z+=`0>l(QjuxD3(_#db|*JA$u?-8-gHXGgz(fbMco<9@x zQOMH2oSr|~0~-csOKYpvJpRi&@@G&9umZ`|_2I7W5`hN|WuID%!_p@=A`H(*G5~aG z1EibI0z!pb9~Ks98r}Cjd@~C~g{Wdx+YC}jbuy-J!_G@) zI1>Za5~bxvM}>zjD!A@9(+h#6!+8cSWfraxnjc_OJw+LFs&#JbL^UZzxBz+iRrDK3 z3YV+LBC6S>v&wkx_dwDJ{KTa;@9VIJ*&jvE`NgYi+yglVG>y;*yeFtHQN67#eC)Qd`Ke7R^p^S>+8%xTF?&;)JggjG38Ct2vn2 z(tcXU*1lxlRMBZvl7iaRkz^GdnwoheFzlByHQ!&?CZ}D*3=uC(55YOZA6sPy-`eTf z@0>#(2*x6_diI#uw#0TpiE1%pF$3eJ+1IEEv$1&lO0hsTOYO1t)AiD zxI~Mg`(A>tV)LVEIp$}Vp}L%LheV|+y%Rj+xWZ9yiv9hwEj_Jtk@>pq0?ELPE~vg$ z3`>8)2l8xTj|~_ybg^;&84)KPwjK5B*I9yasl_3bbMoQQ(P#XryKw>CduQVh?4A98 zw62(ezpdhDUSWA(_NnnOy-4dm@vb_(;OPL0h14 zDJSshB$6%7S*n{&G$w6j3fi7JeWQYm-08>*4ns9s-o$saiT7vysgv3JBS5)}ok*FQ zD8+0|9k&#w2O_Q3AIw3F4cP4G=9WR5Q}f=n%g z>h;0p8vKz(y(?xWvhK#~$6bbBE#^UH*O53;@ z>SDsJYl1w`z#{Ep@hbXlP4*@ofd}LVokr&ylf#J`do#auiq}CO@xT1@CZfI7JB%`N z10kTq!*VOtGAlWC-oVmw$pDxP#w12wAb&q+{27e~O@*pE*yE|bawSdJ!1ukcN#S0M z?8?;y`4Iq>72Gm9ekM02=nNH=@0atJtrHh(Ju${!WC5X;N=5;15D;X>R+Oq6%d6^Z z>evOG8w$8IcCI_Lk}E_~j~;;(`vb9@AzJMv^;y zg2}z`Od)7o$5eFk(Tx*h^(b1oue&wzbUnN4o!cCXC@HovNJY?vUu`b_{Fy++GL~fI z`fe-m?oxyQ*!lis4(i1+dxljzdmKoWk?9?Y@CU~IWwoO-fM>UNC!gWPtzzAd_BPfI zcE#xI0qkUt9oQ-I-w@J95{svW%jP}dbLK^}C;c>zKP?BV^-;bqP;7doN>njLnnP1O zz8Ah|c(d8#d$7vFDxB8rvH!uQL|2O`2(x|h?hSh!D9MPFlpssU|8C&924kuA16VT+ z_x=wo`X1fKoB&p+$Esjg-#;qPi%NU?_!J<|NfUY0bunXSS}>6FR%5*=~f;4bNL-FzPU7IWH;9T7N75&+z zC&#~d`=^vFtthdr*1FaD3dYMWHjqZ}aiI$AzoA#?3NzK4n&7JsG^yVZV0i=afZ|C01X>|+D9q&2E49UVh1L>;UNo_L4#MPeT^1p!&|UM!v5 zV?jFoLr(&~23+t(AoBa&pHpEHmrdK)yT){MFdnRQfVS>_&yU%X=mB(`nFniMWPE9f z1m<1B$||eLsZ}aZkVL$V)(0O4j9{E$B#M8d?#pMpy_8OA`SOC4QnbjpB@TDNb623Q zI{ij*=P zt`}c=-%zJ7{>DGP5;&{BuiMyItRxq3YN&TtEN*+xAUX?TR6>EOHdka!X~lKdnJ^UN z;?wla)Z1e13wfLE)Tao$uDl;Za<7@PV+?~~9Ff*4XX%g^oDosOdCfBD+_13pK&c~>HK?9~oP6}aRFf=|)@Y*bB zHTaPykK+Q#S+i!TqEg%7mhf*T|J2GLzl7fX+}n@Ms>pdeYOu3UV%3hfP8rY-yksOx z@gM!si49Q87V&TQYp(3Pm*&3sJ;I>2C(zVqnLvWyt%!C!`m?5XkNQu*@TUs8c?d1< zsyumG!+Mkvq`mKH?VqAz-&yc!Ep_o~i$_WW^CzmKnHcRgSQmgOEx%^vpCzS3*MGvL 
zf2tdjf2`)JiL0=YQju%fToDX6dI09c8*+R1wcnXM#CTL_*Fyq&JtvXnS-;oNx!8r& zQV}><+bQ&`-AiS1LnKGMfP0dfXUWPWa-w5dB*uK`uBcM$*&mOp{^Hx?32nLC*@5>%yy8OLSMYz4ahGk^(5F1r*St4Y{AnF) z+qKO02~;OIV01lYeSlq}QGze+Mdmq0_V+k4$G{4sYQmGMG4x-HY~L4_CLM^>mNOSW z5I2Wv^LcL+2!{gCe9DR4gs}Kq-4kh};CCsW{GYeQg45nJbXxw_6}Gm<{9JnVwG&f*vf! zxJ~z)mmzB__lXNhm{S$`~$>W!D~Uql}wP2-AUSSi9-m|Iw>pL zyB&0Ooyib=b;beS<_X&;5CyMAgyBjFo{e_x(P3k%o0KfRL-Db_HS;uo485j-v?(T3$2&)NgB|E&G7m|WO6(?>ZsEp`kO9<0&e~y zbDiWO z+a?WY1=NYal2moV%&&OV#Tj@+lK%81DkWo?u9M;F&?vBTs*lR2VF8)zeK#};_6Qrx z@~z8X`j85(8%743`?CyWpADXH!m9mO;wAqrMwaR)j_thiCk~zsldh5Uft>UObeNV_ z78%;zQ!K=~Mmugf!~WXPKXitOl%X zR}Lj1KbQ3PgUHA(9G6yB4;mNUI$N#NGEWw9z-@eCU;Zfyc#b4dW7R8}d#6lk2}utP zlz*IHTX>RTTT!>?gLujt-MRttk&3+7B0&A6B@en;{c3((N6NSmGCVPutlyBH;~q9P zE#BuaDM`x-GK_;X?1iqeKXXCy5fK}KEH}QrhrE;dcbIN29_9@VjMe*NrKie;7W7>5 z#+FvClJXRiFlB^0w);~*hed}=x1mRMLWl+(EzP_)=YM)d0DO@t(c#_P-M1@(6j)G-qxBY@# z_wE?KQ0Z2l_S6W?Lbyoc7ekBt!D`{DS?48uH)5# zH03|#D)II$yWzpucAnKwG3JmCU4x_`oIZ_-Jn$JQk)xyiqKChTK1(ti;{agfiGOw> z%$aoH4lUa5qUJa69()6e0BVmD$-sq6*4s9#(_BxM4#a~T+UySyFjURzSv0@rR%Z-S+w2M|=F_Q5Qs--Z z0&qZX^P9q#e5IH^>#E6M4uXPHyF!w3*P?cUQ$Hn?KXxD}4}ZGZAWYL;1H_{ABCsWA z3DhO>^=oDr&-$US)l0P|EBlodX_h>dt2<$`n9ta%U&0bn$v=L3iLeqQBQVPJ8QT&Jxzgv znvuk{3e>Kp&JK_`0VJtIl>i$bdb_jyGrNm(iA|mrcGSf-++Q?9+~9PEnmC9%G&!z1|L=zFIS+SzEz4i6t1z-JyWlR=ZHle?o4V zVd;PgVy*go8~99)MB%#&9?SOl%8c`aYz}mid^?>f?gQ@(o(sXT%Io0PwGG=N<8~&s z04*pdDLMsh{2P?=Rr6r(HGlp?nE8K4~f|n<~!ZVu$Ilk;CCb%nFpWZII1c;yCH0#i3EkH3>R-UIjfyUE ziT|AFS&&^hQXC-mbWxJEMK3G*ShRXw`Qw3H#zUke%t zL3eA*sE2poJOQVB!Ef?}jjx6cNrNJkZHsQz2I|r|hDI+6@_gd+CN5Hcjw(jgubyD5 zfs*P2oequoQ160|A#3eAP@vy$N<*5S)((g8h zQ&lSz-;wQ>LnV9n9kMQG4QV+k(-aJOT{;>4FYI2s7}#1!ql>q-}uQL*tws6PmsG#{G?w<6@t-ViC2^`hEQd zDiJGf=UoB<(utkxu*A^cJpSK45d_i9P85vfpi436=L-og0rLEl_i1;bMe|6Iwi-Ap zm8_Gn-sU4y;c;t0+*}yYbYIGll9McVC}D{8V`gUd`(2;YwN)33@rDSC{q(8bSs0q9 z6VVARvB$VaoeObxDB+|UesY9!u2=s__v<8=?O3IIk}zUC!}2dw70m^xX!rEA0ZE!A zB&U)Ay*n)BzGMpT_KwMR3*Dq8sg5Fnd<5}hh1OvR%slJZ7+0L4{)ti%ISawL!=t)*?&?=nCo)c+AWb%#tX#&q6Y=f@(zwz`J4RgU2p9!qce? 
z|LFg3Bhcw3^}1%2e+W9fAwOJE^teSxe&Km_#$B1I4<@L>99?6dCLKKZD1y-Ml{y4i zAqAaLbNo;tn2j$}Q-ai)HA~l`@)NC5nWKn+(Twnbtk)k=*TUR8-!f>cnN>zm1{;8q zvoE%M;@srNxNaFO!*Fq86os)% zPWacf?LUw2Mviwblpu|$Y|bRqF-`T@TwsC3(iFA4EpHH8(tLTPud+9-Tc22+#mzys zo1cP%U!hl_bxB7)A3r@oyU5+t_t?Ny zC3!0uX`ADJB<_v_L=d9H)|O0Jk^HbDU&3ET6AqZrH)50I{^-f@h2P|@Vq?@3^Yl)L zrZMs?jp-T2V16dsEewnCu$AqGqW zy7#*6*S41Z=h_t=;p56o8$#2nO2RIdzGo|rgsrhlXIVfwB}^D&(k0<}ae7c@;PU-G zQ#8i&<+o&YBBKF#==|A?jl$PTE?+0Czw{>^x2crRoS&a7p1coSMFoHU_!K<$nnb2t zk+;MlumZh3`R+$n+eV-1zs}-8SU@K~mucLa5t}j)NPQ9@`cJHKe~3rB2H5q63=RD7 zbxL96SA4B#S>qnwpS*c?93#HFZ&6Uct*RZRY~>v)fIFMV*>ZWHWSOqb?X4Bbcg%hT zv#|ZH87*TeSfr{?jJj(0R?o6BDTCS{83#-N@P9V{1ui1k@Z5Gxn08t(?@+|nbCwf$ zd+QI^_;l~u_@QCMoDFgLimZDiE2#hS@0oCYdbL6QBT~WWrK|VHh4*Ud+#(WdI-mRo zx304g)5sl_*qE#LP9dAl%ts5Y4ArxFz}F78M6|n0{$n z!MXgEeu!gIjW`QVlzR4hiRul7b8{w!iB=+-ssK^dI5JMr=`bLaO6cWr;xWXL?k}alZ>dawA0~`Y*P@rWCyym$RrrVHaou^?9N zWo3I0zVdnFdyb8K3{REOm_C71D%LWMM3~g;`a2tme5?1pZFatek5^n5SseTe0lgBv z4@Ov4TsA~L<~oCKYo$W|{4-8E)c zmv=zr2KRS=wZ8GtI(%${PVl51hko~G7$p0%ilC#eE0l)30DFDU?5g>V-H#nilP3O~ zR*+!bjCaTIiAj&a%G`{l!L0ki)k*g9>c2aNf)H+Z5P)-mZSIaS9uH6VyDVmUW_!~W z7*(_XG}^z)PWjRlDY;REdVvFZ)Eh%Et9O$FVBXZU%$;DrO$pMu(4)ZpHvY$-0nahZ`cz-M z80sg*MsjXe{UkxhH2m@9R9h-K&8Kb}R;EIac@!gzJN zqR$2WRyZM}a5ju!zf1NNq=tXQ*DIoQXD*I*3Y$=meP$1fl}xq5hzBm{CA8a#i5V` z8$8JL69Qsm0Fmismts7q+aybr<==k5*XBDjB1LaUcgEEb@cy{!X%MDwYob4y@m>Gi+C_nSQh z%U6I{owx5qFFHgAx3kH~u2wE@tblE&VUsNR8-cv^gsfArhyq*pN#nC<4GaSeO9s%b z9b0zz5u@#TYZ>O4jJ;X^V>f{zoY%>U*V^s`|2F-7mm~8-khBHGfs$x)0S3GGOhNzL zLkk#%M`FzZXsFSnDM1~bhYzpGoPPMND!<=+1)u=LdJ;W_HsXSL?3~`!*iEDCJLWn= zDL7sO679aQHw)%sJ^o*UhKPX})ix_CO8m)1{yYrXO=!x{LAIp*ZzDr!MzcYaMLsatiqE zc|vLYSIew8h_SFP-+7#&U2Lw&oe$0xyo_U|RWtg=&>>7wksv8U?@pUtO*zt{7@wVO z1*nu->zdv=)WxdWz>v%qU>^Na^6S9vY@ z;?RNta%OMP-Gr9Y*Qg%U9lry#30j*vGEeFObr+HHnkJx%33jr&9|s~ZnkQ}%GZ!nn z7>ls$=40(g=$4N1Vz(cst<8myFJ9~?tiE17FD`btk}@VL zN>z*f6#79KIG{)gAgq5xFm7@|p4@Mcs4DA22QwerT$IgcpC3`9-#0Sn2UjQCVyX0R zTbHT2M`59gf597KiKr)cWB|+-95?>}1EEza?xGvd)9{*GlNHXP@iX0f8E*Gx-_Dj> zwzn+Rc9@vCO5-eWsXQ8b@d9{AMBwgh$P`W12N*T(egda*HEPk(t+w(H%S}sVpS)GR z9%$#Zv|M(2@Rb97rxv57#R!W)mq=-LGE@~S3nbf}yiB`SN$ygcZh&>eAsK>HAp%vc z0+06T3a#Oy=M8zE>%Ujpd!*}Lsm&?Ux#@A3Ffl!Ds2kZ*)F{8=`0u1oP>?jX?GvPh zUf~#e0O2p4b!egy4$!rUVOA)nRq5DqWDX}*=U5moy(=vJvSCfO4|2;3Oj4?U1#u)_ z)f8RSXXLJ@^c69vh!#OACE^Z1aZ}MBZ+OvUg?XK@YANSdiAacAn&{DY5?pcL;gMC& zg;7C$MzX1Qdn-Q?pXO0jtjGfUI`p3 z#90zQJ%^1pY4LoGWD&3a>P7EtR}H4F(#B`{l8kR1PSHm}qWhH?8_Cdg{n{paU^1nt z#Jj`m=FJwoBKUaQ^5EA(P!Lku*DT;P3qZ!ebfM6nn=API+*3SWmrFWkbhOgETr*}E z0c}sQVL@7&ng=8-tlg<6a@OclKNlrK*k3Kc5R$TV-!DJ7tg2ZRNTa5`e8|gHzy%&YQg%KpjJwWp@AEK6I=B zE%qAc2gUGbrI3OY;DWcZ43-Tzn8;$)>l)&l%#$v>171w9W>*uWm&JSwfa{=H?X32X z(1a?~JJg#Ve!dyeTtBTG-SYxysKr^P~ zyM*LutR@kYR4sdq8xx4jf+X{H^QJa$&EJ2ln=oJnn0vcup;_r{*=2F*E3BvubGFW> zJ#$N4hAIW!M?IGeSqCn~SzSFuq<*5JK>a&*;Lce!MO-8@m@yY{nm zBmOs6-aNhHzy+IOyPTmxZEzTv)7`-L_S3@hbxc%y;MV0cIDI)ak0v|wg2weA$rHk3 z#&YuubgSkd7CdlNC1u7XT(PfhM(@hgx6e+Stf@^Gy!352zAi@p#*!Wo%sS@mr=#S+ z@klp3;3$B{rS@*_r~Rij$!Jfk-<@9>1co4`Jrl2Zk7#gbuf*uy5S(;APeS^ZI5TtP zIMI3AFn3CsJ*zY*sl@XT4Jv0b@2;CJeWFQKC1T(}D=f0dJ1G$(^yL5_q<``Q4~%EZ zb1HS4%9bS%3RCRTCw7gSj_YUFr@C99eETL3ifcC-~rF2Jr`nd!W1PN7~~2 z)ffNVzaUX^%ir^q`Z_PD!l$*Gw@_opYPJWX$Ewp#B>tIi`c?|oa#&|Y!0(lNQSxn& zvS_^6pav75RO;{qIrOSdA3fiEw?}G_6ICsq5m+`-2;;6#O6GZ_`vfJhA4zvcU}R}; zawKW7ZI1tR!of}xP3QJiv!hR3#pcZ`Jg`P8bo7g=oNe>LxHD+igWaepP zzbX8Cp#DW75{QaVD70#Wl0Fb~$0Cv@i*Jh)S~78bQFNun;UA(RG{4KHfbGa58MvqmmE&J7&Kvo4UI--z3qRQlh zHT~85XVp5sBpBjHiHxE=Ga0G{={c8O1PuRmVR_?`VoS_ z41+S}%)45X9+~q8peiWoYvg|jyJRVg8X@JSV7{}S&d4bqJm;fzd>cXSgki*v{9C>w?me@8MRLa^O 
zgx_jh6i1wKj)Fv+yzSd!S>L2*LEx*2#h((HQyJxJ@hH}cGI2>kx~FU2I@Fqx5t z`)=k-)hot>BT8lKO@2CneeQA$w;uT_@0<^B#o|8kl0&uD_IN^{;g)Jmib2hSwr{SZ zS;Z+00a!H|MS+KoDhF6enP5ez>z{?Rz<=eydC{@?;s>(UHS4SQEyc5Jps#(Vjf;ip z$aOcl(i+!(Y>j6Xu-4k>^P7w{j8DG^3<{XehLr^=p=ihuddRRBpu0C&;wWfcQQ?A< za)S1SHfLLhtmXoJ+ZvuUXXU8O0^`R?8a843K4xQ7jD@x%UV9h7yhTYWSd%r?Umc)gQog1WtI{ETNsh+=%|xRu?BJy1(W7Ru*zw*27x!Gc}rcZ zbdu3pECL1aM9brCz^&QguFVV)a&>(sOD9`b3qG4@Us-T`g{dVu_* z@x|A;%DGLOb+BCS0nM}GA4W8qTkZRk+1X2Xq=D4`$J|#&Mb&qY&d{AohjfRu#dH?VIazEU4Kisoe!>l=T_SwIEcK`O? zqkemNx)I>?=NTXvtPzrIfN#Mwy0U1{>&KaNS7ERvo}cHyjOd9MdIW2u#R8u&lADMV z&29~H>>B2i0t-c>vvKmA%Np3kGh9j%JQJ=ZyczyN^C*Ly$@rx zU$8<)BKZ0XZ;$Kpzl@ZU&g5Tfx_}mg#j~A` zLdacegj6Q1yUeJf`9N2Nym!PrD+w*wpbTdoH3 zn;H$3y|J}`q5JKMz0D=jT9JypRIG*aERX3eT~&Ct21q{$i2|AZ8Z0}(cX*t!*!q3( zb(Ya*o3u!hSv$64i2~CKpHrhhi-AJg*OBz(?WkU}E6l)7?MY#EHIZy^-|AQ8T7ALm zt(r^8gMBkgYg@M(mz#|d(eoPK{-p-RzbY=2$FRWHNV6_W@GGI@MJbwF6PLx&WMkWN z{9@m_LP<_@+F@`R8&p3Ofy$QQ_re`pPjU%gqif7hoHPzKGuOBs$sXco+iic)00I5N zP)}sv`f+4&_;M!US&Yinm2<6+>hHYAWP|cpp}F32<}dlN_?=rvy2UsL-Dy58y!l?y zZqVzY2P*ANYR&w4)QTuy=8G4z5fwqLMTpl^$3-jcg1VBQhYPAr!aUW5y%=~B#0xd7 z#sBzX6RQTZdlQs@91{rARs0)}?XXR|siOZxE{_FjZG6vWq~g_eLO|^Ln|3Y!dm;9X z8}dZ#O(?`&!oIPt9&$^3dJny#kr>uAB0gJbUi3io@Iv zx!Fc8w4vSeuhm^f{WDz#!tq&ZEQuU;73w-RDYaQX1X1-Xq>{4aD2u-xDek)Iy>({c zfZ{jYkf7G?c@1(vk7EoU!ANo-T_DdEA+eIzT;8sIqy=PcIM)vGai~j!PVm?g1+lKK zg~i~ci3OReH#`rMQm{75Nnm%Rzt;NBycmxZR$Pyqg|WeFZ(@|K>{W$aZ)*jU8ESc; z3u=^jpD>>(qq<%nD;}vakDGHYRqi#XkpFYfhiVl7D^#ip;$D%bk>jw<0qnrc*WlQ< ze*}x~bahmI))8fOFY5KoMDp5}@3!hnnr%z->dyPFQsWsr{2t?~-QBtg(#h2^$**?& zD_8NdQbWbQsomhJ$Y56Os=KZppW6 z!p=ymnrZLn^R($x%n~0d>(4z5qs%Y)ccWBu*hM{wH!I0DZ_BC(7> zBkr$(2LK+yRM;%&_f6dPaatJCG=gJ8{_9lGi9PGzHUi$|2me(D_-D8D>-}`y^W>24 zyf~c!`+0$tbg2f8O7tX8F+RvW#pJ8!ivU+~_JngFn*X@4t9g?R)X|ifGs(p8dMaS;|0Cg*L{rp>#3*PU)`3q$cwhY1YB7^UQ|2ngHixzI0=sI?hrz+<(>v z0nP%-aP7y?HZ{C>coy0!$aIbsy`}(dv$k!GhO8U#;ZI7)Cvw-e;%Ve3=~gXX{?in< zf}ohWLi9oWKV^h(AwnPy-wYuTY3Zq+5%`2eEikmtz0GM0dvgM9LZ)ibOLRz8KjG)O zfp68;oE_V>2^`00EaW@jZn$mixelQzKH5fJq=rnYG6?}1lgtb&u`YM=`x9Q|{7|kZ z*pJ2ncL{=*K|x8d$dB>Am*bvKe;Yd^vNe6lDv^BUcY}8&x#Y8S&iy^6TkK|AoD=${ zITG|g?W2elB2l0S{NVmkjtO=ZjdZn{o2BvSNSx^b>lfZt)#3}bewy53ZWsEa`2$O9Z6H~GEUg#rVvVwN|{-X#Q=OSPmdH(n&?8v828Ls98* z)rfRnV(YEyG0F*=sX%<30Z>K0@XH|MD6MCR27Iz`aANMcqRS3Tf;KrVYmJ3y#Ov&> z*^y`%+?H=kapE+sHBm4^eB@s0kI{H|0f7P`A)Z}TK1j2X=G^uwV0xW0)i}(_Yte}Y zoC((y;I(qM75#UXH`3J@+;7{9TaMq#eVj{CDsp}{_r(&w_HO)mruuzhua7#rpL|nE zDxC$}hRHM6yk=L;uafM7ZZS8v3RJnhCUid17X{t9R}DYge16DT$eS&(zvavveph`I zCM-$%)E*q6fCV;+$HUj_;$g{5cs$rr>C3eCrliGR6d5x;iv>ECAiMgL{td_q`-KAc zXEbj!mXknOz4z{%SeE6s74f0JZFSYKR>G?8h&y8YXe~u(R6Kg^b}jo84Lh!WrW{3c zSt3#NeB~&Vz(BiMz?CgTh=kjS&eK&>guW3uf^9AeN;3Jv0*j~^CGjI29WN`+Bc$&J zmIC};z1G@mhD1xWMp8eNvwO|`2_?mSyDI}u9aq=h>x25|JjR77la8d@nl(L%lh1Bq zjc!RFJQY?b8?+a?!At@RjmjS9NZga0<<@Bvoq)e}Q$$&M*TP~g)Y0wE7c)Our(#Rh zFKVncP*gZK$8#VPC6%=|Tp%>!{C&Zg;XC(_;mop=3iKf~^Mm+Izxkol2wLY~$>{8} zEW2MIBvF==hI4lfb1ivlM)!u6I z9LLsp{&>1HQ+_dnn7*sVvIb&5^TqCqGIrXb$?>=PJXV~V&Qh&X9nzID*r2`o68TeKb7?u zra9#I`=Uc2^U^@fIt|Vebd8~4)kJwY*w%Aw9N}|zoka>;EMu*2a@aqGPgy-89C~!q zQRP-!8@a>Mzo0%|$W!p<9`zv`g9S!J;6+VosI|A_(pGuZ{4c%UIZeGmIk1CUvdDb% z0kUN_hPKAk@jxSN&wg(%JGx}~^<>rL4A~1QI5xTn?fz=7wX3yla$0fyg(yC;nyUAZ zNMVDv)%h=4Zr3=`tayGq8o~zOKfc1=G_jBsBI=*~lKlGahnvGQ)Q6lh6vDiYl>=!v z0PbmL#s&LnW;j|2HYzOS4CI%zRMsGQBiKO0uf}A2)@{s~FHTJ6B3mFefEkIO@~+fQ zD*Bnoo1^qirCmt9M}WsGP>V}x27_#?59{6tEu8b<4sX-ZCU-;GSF$E}-iJLG;r9KJ z&sQMg(IkWeUJgMN8%U<9ND!)*xu0!bDMNL`nu9G_P-}}-YHQoN^klqMz*{NZeKn73 
z_PEzVU6|J9BhFPldD?a*a4Nm(%pzLg1CYCC2U)%Whhdwwki|^wtLz10bTCJN-zf_Bv1EuhM@5os;g5rO000C8A9Bg!vz*PI$g5FemD`a8z^NDsCI=E&{ z?D_hdy2`q1fW!=c2d*Fao0<4|Jin%+x|y_-Ij^gHm1Wm&^?HhE35zM6?NN*ZmjfwD zb+dy*#dT5JNa*4i2K9B56I-WC@alUDDy>Pl7F(`f&G16MYEwpasaUYE3EM9@)Y@~y z!Iv|IWcv(MV%*T%>qZNa8f#;_1?z+u58=WokKJqS`4^%2RKpZ^Kx&I?`QpKqp`?J& zeBaC-yB0O+asQ?EM#y67>!8f5vdP?gy#M0xy!m;gS#tB0$6v}6qVma4DYwM+c`^-V zg=@oxEJ0J@2ieh!BvCyF**27Q-v_80Fa@2ZIKD^R*weR7ev$6ah#2mEWR=q>L6z?L z#?i(}%F5f#rK7TcdbmSempimOUWrR#ux_TO(sk(ONxB|?$|{@`cK7)*vMYvYO}AzY zY1Y9wfE@1yU7m#MY}?9U1XXma!PCey;?IM>8tFq4cJ@nVHao?J_{hn~Gw%H0t4Phi zxfy?HB|mTnBqQ%3Un5)sxsxgxh(V?K>GH{Ape@}@?0)rqtkXgxxGaD(#Isp%Z{*WM z6)Om6r;EsJB3O$5UTan7Y&raOwJ=GpLcrghA5>RF=!!UfuQuVVPEI!bC}ziU?JMV( z{O=m7w>`|!j>Go+`%o-Vy7Dh~m5A6O z8Q_ANs+t#x24=Jh&%1Q_?tZDReEuI^qAg3gQUrh43u<+0Rjv)9O}m_uX1j^|Hsy8P zL%;t0zx=nm)GLl(jdt03tXl1Q?(N!G2Zhfy1U|QpqFXSy%3Zg-B4sDpn)vsEw_g$P zUV$8#_O5hG^kzJYz7&s^LlgA1g{PQaHYw)|%9Va>+ zgGdEc$%-Hi@7p{`HqW-p;5ltq?VdEbqXfbKhSMD0W<_b${vPBk#6R+f^Xq=vwdY!j zf^8Pl6%C^$T14J8CFSKdN^;laVG5^xp*o;Q%d=J_Q9g3qyVQ|$<>ShIgZgYT!oC>2 zoLSFqPP<=9p)8*gHA)Ti02h^<(tzTi0 zU-;`?YgyZM>X+yJj)8pKQ11Vvan;@yHOmG4nd`b$bXtADj9s{RDEX`k?D;1C6A53A zejfI}i$DZ5!FmLdXA~Y1r`6>@Cm@fGjOYWvGsQL~uj;mohdH}_ zZz~dmth?}7;`PU=f#j*}+6o}69a{i8`A$d8B@tW38n9*um&VM-2{U#NC5 zf0PoRLpSnbS+3pQP#-U1>mWaJBeH8p4Ra+z*uS~N_VC5;cKV-As)#J4ZU24rfgjf9 zBJN+)06;Sc*hqFuqWa6jIiyctV?2$Asq8ezf0DFL>raCPWXRjCe;(Sk%-t`IUzlpA zhCPD$A!O!(YB-H|16(_OPd!|Udt><+cX8NqdbRAQYQLD;$bQ_}g%0Mk`54tK1d_&k zSiGBgcbtZ=UYKXU@j|WV7;Cfd!}b8A0J2~6@9O?Am+lm)rkZP2V0jrcoaFSr(xaQ( zP8LYjecb(w`VkUp&gSTHEL%c?>-H1FW^;DxdC?RR*V2S1z;C_M^dHBXQ0N^#hOdN$ zee`T*`n`C3ydw5%*?A0CD!S!ZR9Pl%Mt>I8i$d#{%NUK3> zQFGhmrJHzrTTH{+*the=jg&6HLgU6M?Y;b)h~pcnCSH)$lQrO#=!950cUENMu(wM+aTt8)=wMZ9$Q5n>?q1x03OBc-B}?ttxMLr0==;`z zyBWm(P944_yEBK3?25-n1ML(T$@mv0o3dH9o>Njfr9kV^2>ND5i>!mqKR081saTlN zl3J*35Ve73Xm=8tfKlOl0-SvgdpOH}atQANw29`FOWY0UJ4 zL3;p_h6!eWfK2cb5qkWSxPT0x2t0m({PF*YUd=~{B<Z7ZQy+HZ5W(rVx8h{7O7c9kt`79!K z%zKwu#l@-AGe65mVQf)ipI)1-; zV;gmx6PL!=W1Osmv;Vt7Gp%oRxz^rJMQvPPnm+h(co=LpJYr#O~NqqJ2-NM zN86lfgNJIF=q*x<0%?`<0{-oqo`0=|*jeaCf5V=@PO|o8SXT3Om=pcKUHETvzqmn! 
z7N!*M4c-O0g{Iq7?g4>(v&(R*&dC=PQ+2h9x5PR|F`U@+D7x)&P=8#6IT)ka= zZ)Lk@z^sM?Yms_(?PGt=EOgF17`|{ezdR&P6Xx&%%t@>}KC3WFVSWPY>gsQu2y>}lKa0wXH5h+gCwSR$pekw3 zcMzj3N9`6hV0=^yD09uV+Hf!RAWAdi&T`n#cHJ%+rT7@>ckLvBFCq1xSyGfX3rMan ze@%q0+zOvNUms1BHI%-)SBndUEofgsx6aoDIQGu2@8&@<;#hNc7cYU$sS<6C`ZKGN z3UO!k_l}|N>Psd zl>>ZOgcWH_VujwT-GsD}*K=OwN#)ap(#j#s|C+E23q#~ubB2T9l3StpVovZzE?08I z_1;x-Nmf+*J?3WI`MZ}8qvN~qjuyMC{a6ZckY zpU(-A99wqX+PC@fjbL(^15(`4?uG?W@2Z^pKzs2xSh#PU7G?0J@co0%0s!7c3*y1( zUrvo&HCma|;~FaID!|pn{ghS=$D43i3hjOF3QcZeE6793;A1S%h<^MwaHe8%7xM7r zY?CshRe5x6eVcT=WqdTl!7JM~0g%Q@G7%nvyJT2E}HljyB5bKsA!Pd8Mt# zZ@TTHJoDqYaU8$G@5HeG5VP|OBZ#!#W~i9fU-do#kkxW(Q?h!d*_*bbT4Qf5m{aBl z=Tu~|6&;dxs?>8?3}sG8fojf@eabV`hY1hIH&^XxzJ zRr6ns{**Szd&T)El!#zxece1c;pa<&ps&`9&uWucgDH5scPKucuc6O(YH~cLIhdLi zt~qJAywG&lDmU&v&L~v@in>Tq3)HYEBux%`osH#v8hoDPdR5MksQPkkao32<4yoIC zYlB|V7fQ~)VQ$mv9@0BqklBF z0THw}5E&a*b0?=0M$svE)vC_%B_h1L@D#u8g8No~R16!xWPx$vNjug(bhhPT+B*TX zH>(p1){v#>IeJkwPX-lcH}qFBdIAy-!Twu`nLhul2q1CmTfO}gKEWE~IP%F38&)PV z$O#`)haZVxaU>1x)dSiLN6SLct=fKI;UNn8&W~qa(4spcDpRuCgXnNhBWX-$bh2kV zf5&R_L>+;xImNtn@RObouEZ%ND(Z|WvAPr`CUvG06^G{FSdeX`IS3gqqz$ca&9aQD zEb>Tv%o78=v9{})` z2nGCh2g%h}5WlBo;Yn26^HYa4aUBXM_Gf7&R+c*(m5^I6HC;R>3hzf1%;4_CoEmcG3-pWVe3FeKi`5KX?Fc07wFr7C_N6 zydRpb?b!$K%f;5#-B34YezmhH_B0HMlwiPqS`7}Jo;S!R@W0MaO1IwI#^K^})29PX zQ6H2TRztF)Y4aa|c|Jt|dkqxbo%8;1?6D?-1K;=w^xodb%y%O%V+RH72?d$boUxdc zFgkhe%nHWEl}YPRLvwdj%O!0`HhWTy9q>VA&r|VX;qUwrbLM_dS#{R4 zxi&tOeSK9Tgub`Kf_?LPzK%~r6`*CJP2+tSfVz2MB1A4an7)X8)0qAoR z0J!foA==d1=h}OdPh|@PJy|hfziiwc(%DNfV7;50)}e0Fu&i^_HCF`B7PkFdhFkT+ zbepzR$B^!2V~BgvH_zaW-h+SXLXMz|9Po;HDxIUm)eTafmMn_tNW@%Roen4?uAI0{ zXU}hI6#R$<^xpJvm+iW6dGwm5h&5`7(^mNYO^P?u&x*f4Yyj{l0a8q71Ij{Pa2|Hy zvMV?~Dw6~qobHz**ZRLg2Y1fLtx`5kWhA3+DqaQu*JX%7tkcPW+s(RsosyNswsb(7 zFZ#)QQaF-p*Sml-pM(5tHA4z$jQmTzu)pf~cLxIqS)T~niLGyLKIii#6W7j{szM6H z;;(S`8)x%&#k3Y@GK@TBP#qddmp>HM`3xGLSCj>E8f9xR7ge8P<3N z?&zCCJ>=T*uNvk9o!>Yov0L}{4t$mR19`cyVS0{}o{~7C6x8I$rGxkkr>b1-pK$J+ zTLb%>OSsN%SBy;&;vv)h<;-a|*b$9#^}KQe4&b3IfC#?-uQxwjq{W`hnii zPew(JGCt6b#JEK#^x!tzzfgmpt7U;2P5qzQhReq8dr%Aj0mOvtZh{Vz*eBAEKKBg( zz{&)wdHd{-%Kh4a0w#GtJC+d?`0yYG|6hML6ny`nwPSxl(9r*Q+b2X&-3N~k^dI#s zK>YveGi2bEr2C#uzR453<=-wAL@4NQ3|*a%P_gh~M6!u;2M z3A{vv9)FS(8VI8nIElWg&KhomR>MagA#HW}dO_!?B4pv0yz6SM@{lh{cY+)%B0mT* zKp+a`0(#&4ew=>Z@)QHMUr$vKv6G;VK>wL6H-c~6C2g$L$e3~wb7dTV>J&c(6HG7 z=OgqLYmK|FJX3j+JQF_u0Ki26P=^>X)vt=r8&_~UV8O!lTMe{19Itza$V-_CP)shG zG%$e?5CT}203r6b?}`MqVfb&;KSr_9f6zhUApG!Q8uPtQn!Gg5i=eM=9~^mWC=KBI z;?5L-hBs1Q+~Mx@MH&R6E_kjN9o&d(;sG-74<#zXA`To&} zA}ENs3C;j*2vdpU6ZI#M$CS8Y^K)n8MIKdf|l+b3RgubrTg z@n+xWA#(m>BDw)U+^;hj;H?hzPhbgnp8ETWGd1Ae!ah~{%z25~ul*UE-<`o<*3uez znTTB}<2Gn+lRUQ#-*fc)*LZ~T*yVXqeZA*4MwK-d*j1N~?F)YK)46)CnTd67_E(QY&7@V}9F=il2ZFJ_w6yPj@0`q^3Vw^8)R?) zQSabt2)?tHt5WGFE-KEl^x-EDl2Ep(<`4|YmGPiOTOcnA64`zV@K&C0eKf9k0j_!a z{%n$KBvjXi2$WJ*;h6Rwc#?Apf9iy1m%Gs0L{@=5INA1ePjB3J8(7P#&(H2>Z&n|N zfxQHnb%0-A4>GG{==fSe9JF~^h@E!=`8M*LCg9JVho9}RI;!4DhHo|}VM0d&kjp-)lQ(PC-s(B(6cflu?2;4rKdm@=6QyZIn{1b3F<;~@ z6!F;@wb97rlxe419_mG&RQbI&W&lb;<)6c{y32yb{+d+g<4;0a`d>eC;@0U!6ZI34 z-{%VS+NjvF<#D4ION=V>AnXgzx4Wg1&F2M6rhupu@fwR;^9w^sy!VsC8_B{x{=iz^ zy&JB`CJP~a=r_gUgptnu*q-Et&M-gO1|>OtY^Vx{V2NOcy;J>lYnXSo*6Z~_q|NE( z)_xHZB#VxhV=N>!dH&A=45nBHYnLeMbMNogBO%Ye6}LN64PY*Qj&9i_K#bc)9O?ix zn5+ab$bp4YH+!3TB<8cF1foK)9&F$U9_*przP9Q&YTay8C!0)|TeTf;vx(^+I|9ot zPBU^q)%DY7Yon2TKmm0nq1mHzQQjh(X9`Bi{gI+1Hnl7pmDXP6gx)2ayz1h*TdjLb zwJ5qJi)kpKpalV}*f>Z74amx!nzmhq24^ScV+S&g%tuIj6yVBBV?n!j$VhvTqrRS74Mvxi=-ebvQ`XSb`@?@~0@{b36? 
zGePO-^E%&qea>?$fXc&ojB%uY)=31)Ez&i9RaJ@(>oHfaYYtAGIPt%bEI5*#$bGBK z>7Wz*Iq4M}Ka%W^bXHpVZ`JyIt#qI1UXOV@H0WaEabiGE#SM)e!9nr<{q;1u-y=5S zinirH0I01CBtNznoB}9n%CLJE*GriT%Dw;z0k757GMxNH1bR0v{qh?!Nj!gYA7* z_{;lzYGkus9jlE>w4O3VF2%_8r96O(4&{f4-;~-TvAGJNpHqP)Z|ijdw}oP+G8H27 zs3#VTIieizDd3PkQ=G>-z_JoB>10`|B7Yz|G{p}Yyneui0(-y3LZOJFFi@xmTnIog z7mGtgMvgb0eHe*lOB>+ot{t)YGY+6=6AUSv4{?_RTS!{AX&ZCZY6G)sgP#ALKeA^E zP#MPtp15n&#RJ5D;2Vml=J}MEq3KEatT-$Gh+VxrQ3fR#puqG3mm|#d41Bb-U)HjE zhna)$H|5bIV>3b+tYl4#qTOm8Xo~}Ff}6kB8mH8)S~!&6J}FR(-Uk!IV+1q@jmBtT z&K%!Yr%Dc;c7Ot^L<=+^ivs+Uqv7a`N-_@83}b%^n~euOnCb%=9tK_-3T8Dq6RnCb}E+BkisORs$^DU)F9 zwg;CfLx*iozPp}Ra9sgQLF6UNlkM))ZI$cJ2)?g*T@_6WAzQZW0rWZqFT8UR%r;Cj z&hkov=y<-UQDi&Y_fg#q*I+NImm&PQ_p^FVPnL(tDxcpXcxuQKz;z-|4rwRod0pqK z7BDqXuy`tie{It;IY8=}`L+N&70&KEE@P9Z*QutzI-BZ^w^GMAtB8UE!w{{z!#@iU z{wavqoBZ%IqqG~V6u->JWz`o$+We%sXU{ecY}w!UL$*WweQ+U0^zC%8J`0ko+s6Sn zDw(>sXx9_XsboJ26s|Gs^f93m!Dd~6B_I@Ap#>-~WMCv_*+q@}##=$wNqC|6hff`V z;#@#5+(!@lpC`BZZbYv!$R~Vb89^EF%?Ag?TCwWwg)ga|{vYSUN02tmUn~3Pv~RzV zs8jZurwp{;Ip*r5``?$npn?G#CrOlLbOX<(qZ(|{aJLraqw1109+J*+GN!Wa(Qw=7Qf)=N{>Qn8^OTx+>TG$;ou}g$X)EQPBp$}N zg&wu0R%M*c&2;$Ji1AXnlHQ(Gzonzk?RgrmJk_WwAlZHGE}LIW0e;)NEK?(0!hst- z)UaN_m7}IR3eBUbeH~?4sSw~lKkV?&ak=FWh zaIF-gdo#Q-k8;(PP0^u@e=B}BQOvfx%+A@Pp6oGB*x_a1#`qSnz=06S2WW(ZZnEdd zo5Hr5CBK3LPzCzZP-kt6wk<&^R<*7qCAh9PX*1ITb%!G4@|2szVr2K})Q-U{noWWD zI8YxvWbnUphd!%r`QvZ?hFxg1v!|a!@D$?Zp3jBfP`o0K%4=-bCqo^o2G;K08QjyY z3T52IW|SS=EINfLo@F?$nHSvY6=5I*nn?*Si|s#}`{PGZ=rvFvs6&}_Q5YMl@O%BM z8YWbI+*eTlalZV@@wWwxSrVxg2l#lN~>2jYG~Z2l;8nu)Xw)=zj9(4mO+l3#{1>6uq*OGDEh{+mDgru4ySfr15%g;IC4LL8a6D$i zBX4KJXv6*%i_sBR??qd{UgyB@8VVWc90OrI|2?wCZn40%{TCL1z0~|5PA|kPZGvc} z-MpRam@XFwJW(FQh;%Sgizw^8479SBe^s#d**e<^?>`xWGS||71j6`RF}~ zA*AEkA-Y}5K-K{Xf3S`4zNG41f@b6ZkO&m2P(Vt1K!LXj6wp|6YFc}@f4^&ZV6|>U8gSRx+KgjD zR-)dX>*{+M-Ah6eacxR~v%wd96e2CeXUw#)uAxPN@6EZg$rUcfu+*GOD9#rp7=g5PcxIlr5! z08=RYSD#d|K6Z|kkBpA{D&$X2S61`TC!?I`e?z3aFR!fVFh71_R%;Vh2F6<}jsMcV z?kNl-J0cB~V?V?=v4Le@xM)mt-OWdVjHzTNp6p}Hu9MJ9jS4SJfhxl?bc`$CjzitD!&g+loLMxPe_4v`E8K1$$sQPFrI7O>{3C+Hl|RMsvW|HJ;{ zt}t8n?bv~3l)&{ATgXpG>Z1GZL3u{%X+>pKI? z&DDieDd65-ai4M96IC;c5%uO*C?||Itt7mL=1-m^q{xFh*{d$?LLw0D1r z`pWRWXHw1Gd)EgTu_9fi2<7G`YuJ5~vlhi-NN;BzV6W@!R0*4tju`(^nz)j$=N3lf z0K9*pl<0ZOin;j)+h!+R@AXPTj*QUcrjfe0@LllT%R+T^n-=@XVBd?b;!D59QboyH zIX|5we7i~H!o@-~y7YF3YhmT<75u{cs+~yYtJ(T)%9(?%1`o=hufdEcCwfyIQqC39 ziRo!+Y6C*mpYEH0-Rm79eqoKfMPmEAZ`j6nz7SStCYARUPJ4CuE&oXPS?PpiiWxAy zanWbwj{y&*OhEqghWog>4lgRn zv|HmC{lJteOHAG^IVR{>s%nt0DQmdNl3F-n_0C8lRdX9Y1zp9^)_Z68(br{P%*`2O z8Q$4p>+3JHYsVBU&|l@Jf(wq`bT6jiUVtC232-8lR~?5QM6i`*YW0scne=GUmXNX|~*nw+xxF3fud9wSYcsPL)G!<99N^k&Ubs|p#7oQ@Mw|An_ZbMW& zgu4m@LFTD&Fihj*o3%ojySM;lE=xxEs5_DPI{L!nU-<;ETz)fkVnwpqU38iN;nzhQ zK|2wp`eGPA+BC3p&V0^B2TGNwbbE_+iDAD@LYT(#f0A@cwWNP`3&8SmdPZtIBfpg1 zrzp(Y%9sncOX}dIb6;*=Dj`_c3m9G|O@qkK%DTT?kG+@-AHBHxZZ%={3n3lv0Cb*b zC;|);IWxKhuw^|ZByTJOSFJO3+Octdw5w~EV~HS0HtFy3JNIQ20q6DcJ1=QWu+5H( zV?WrO4RaH{jR%5sZlh$OO#N2>4y~x>VW7Y{`k<2nKbyk*?lzTZ0wG zqf7Yv**v+>7&c?iY%j5dm!6ijtHotLj$eBzYSH|XceUxk}op~93{_?g=6zA>4rm=dHR8`dWs=B2a=ggb-@b0V03EYoer1g8F6ddI>1~GY<6)qXlI{=Evv^p% zIx7o!tdxk&1hFyKxa}2>Ssz*{J=ljYrWWl`!_Ts_$M(dW(qtip30Sl^T{-(!5(hY! 
zk|L8M<`JQo@7-&TTWe_sEU^51Q6>I)weKw0`PM*Vv67;Mvp|J&{zs2*UKtkrnD4@L z8r~c-k$H>)+;JPTw)wfB4_3pjE@GZrZgIPi45lHxZ!&ZQsmuHb5lUrT7u*>)rgRcL z!5X(J4i4AhOwK(`e9|Fn&?S`veZN&slcDq8OhTG7$CZqoB-voYFBV0AufzzPLz!o= z3*u|y9hN1T84zHfD2o!nntNM-I58vOhkHQ4ZfmzJ?SyWkMUu^YHyWhJ*&SVu49H5AE?4 zRw#}6Y?%@M2r87?^OnnPC#qyB!h?IDvZpXZ_C z&A7)F7c;3F9V};mv4n{2(bnd1K}=LzcZ#a@;S}V!Q_TZCeRTS- zpWdo~%kA7y-aMhWVh)%}uu3iM?K`Ah$9A>$2rDwdZXVfK$zFA{!jx~eEi@hYA0mIgaWhXn-^&2Xx z2CtLV^^B&mz}IJx+l$N0W?ye78@@dwgcj0nb3)Yz*z01F2F?Yn1`ckDfl-^Jsa(t0 z>+3^Xeb8dA(z6$mu1E^=#ZQn$RI#{|o zc{R;ly_`GAvh(gh?vV3b)$l^i}s9fIQUX znu9|;zrFrTs9;uu*ZYQNKZYUojcDt2X2d7jnlAoVPTJUZj-YiB?}MUCUb{Z#ZcrvB z$$1m@Xxb`CWcEsV=o4&T7kdyIfELVaiCA?4>*K=|3XqO6d)SS#J1;hY2!4z(8N`dg zg$W|~20R3J^w0pLPk?E3fBaz3dNkTPWsCVGFQPX6xc3exocW(?Jc$U*W$xDmz}f$n zzs%DA$3mI^f1xU3bLrnY+mRwP%n_Q^kGT1d<9CE8{eG{hIu%g{I|jV@?jR*V53z)Q zFJh0_TB6eftyfgg{JjUkqJemX>b>Cl8^FWcj4V~ec?>+nIv-+_5)DW$0=#e&xC|hJ mtsS11L|(Apt%0`uxCT=1ZJQ#0mHi_JP?S@ZEtWP5`ab|4Mhch! literal 0 HcmV?d00001 diff --git a/docs/images/favicon.ico b/docs/images/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..38e43d6b5d9641764fccabb42eecd78031ebb106 GIT binary patch literal 15406 zcmeI3Yitx%6vwA6_`nA!fL@`r905OE1TXB7+JV7s_F6 zx~i+Iy9^yVbY^*Z`R5qOO2x>KAw$NYR}Y-NS6)?Bb))WmUM=)}>eym|>MLL5pC0M^iuosJYoTt?A?QD7Nkv6P zPd`@hDV@XciO?wSw?k^bzoEyH*+8ZS`Qve&Z|DPMls(S#0q*ZjrmuQr`$F$Q=OFdP zrNza?xqftQ8T<}IcN?VlNBtf7%_ z{loCD_YT`%TmI~rH&b32hTpQXvVO$70y^vVO<4mp5x-fQ7m=;?Ij;MHO3c&n+2r=i zv*x6^np0#544PMWN{;(kzKkakirZA1Hs_i9Gn6l5?CLFb;w|BMy-Vf@IY?!W@rrKTg&wP>m6tvefw(?#m~EUD|C$U@l|;D_Itb3 zLL&e8U5#He{%Ku5Lv!z?R@z?T_Rm;j#@_;byXQn`^*PdGI9Qnwt^`Y6bLAoJPSosQi< zE&ryyn<$%?mzQ^?1d(aWeo(4OFZ^`J{vx>9>)7qGn_N7Y)JOmV_>Y3)3%Kpy+}BYz zH~=?EP2lGNFzoig@pMwT%* zj~M=;>@D@J_PY(n0)0Yjub-iRJ@A^`YmKsl`umAhpMC;A>rEvZt<{(x?gZ2N1ek^W zG&1|)^NWg#_?p$>m(GH8?!FXigiL`OwdUjW0J>yW?N)rt+DnkLwKS z6W)bF>-;L4v2!%#b@Be=HXqtE>vw%C)0t)i=?eyxen&4b>A^zNP~ zu0^IkG|+Lw+B@R+N&Fs7fL;0i3AtJJxgB{PMA^&K?Q!WIB*xgDCv5!qjY9ie{!PBc zPlE9!cz-jd6Z5q^&w+l2y!HoY(OE`ax3KZ!XEAB0Er>0Eb1h+zo z-jCDNZ$m!iZ+6C)UydJl+Y6WWH8o}qil0x6pKc=d6k*97J* z$lQ%iTIXCp_`#(4%sxXixYceOX{%azDCc@6hplm+_+#Il!1xDbu=3(&#|7>MCd3u4h-i6)7p|$@iWbVOc+Sf%s@jn7~ z`?pO7yW!dg=)3wV+GC)}O*(k|JlWkZ^gHcwS~D9AoRQ_H0K@Y4UVs%M15Sp0Ux9H#rUUf`Vq$p%Dv7dbQUqk zw4KojiEC~erhMVEMGoy}>h;^-ci6Qr|4{l>N#N&v~ zU&wdsKG82dTl*XbD zh$F4@S98LX-0+SL0NY1kO!SRF@$5%#8uI*ut7Fzgt%aholeTymj9Q1DgiQY3T;uS^ z`T6-7`3+nDBO=FBz^Ju?8@qX)pMmc~i>8T2qdmd@26PD0J7BQa=jP_N`#0$=oQ3oN r=C1UqQE+|NP@g(R-%rc$%7P3@dl<0m`^>yd{q07O@pjtE+zk8=Sr$b@ literal 0 HcmV?d00001 diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 000000000..458bf4e23 --- /dev/null +++ b/docs/index.md @@ -0,0 +1,27 @@ +![CRIPT Logo](./images/CRIPT_full_logo_colored_transparent.png) + +**CRIPT** (the _Community Resource for Innovation in Polymer Technology_) is a web-based platform for capturing and sharing polymer data. In addition to a user interface, CRIPT enables programmatic access to the platform through the CRIPT Python SDK, which interfaces with a REST API. + +CRIPT offers multiple options to upload data, and scientists can pick the method that best suits them. Using the SDK to upload is a great choice if you have a large amount of data, stored it in an unconventional way, and know some python programming. You can easily use a library such as [Pandas](https://pandas.pydata.org/) or [Numpy](https://numpy.org/) to parse your data, create the needed CRIPT objects/nodes and upload them into CRIPT. 
+
+Another great option is the [Excel Uploader](https://c-accel-cript.github.io/cript-excel-uploader/) for scientists who do not have prior Python experience or who would rather enter their data directly into the CRIPT Excel Template.
+
+---
+
+## Resources
+
+??? info "CRIPT Resources"
+
+    - [CRIPT Data Model](https://chemrxiv.org/engage/api-gateway/chemrxiv/assets/orp/resource/item/6322994103e27d9176d5b10c/original/main-supporting-information.pdf)
+        - The CRIPT Data Model is the backbone of the whole CRIPT project. Understanding it will make it a lot easier to use any part of the system.
+    - [CRIPT Scripts Research paper](https://pubs.acs.org/doi/10.1021/acscentsci.3c00011)
+        - Learn about the CRIPT platform
+    - [CRIPTScripts](https://criptscripts.org/)
+        - CRIPT Scripts is a curated list of examples and tools for interacting with the CRIPT platform.
+    - [CRIPT Python SDK Internal Documentation](https://github.com/C-Accel-CRIPT/Python-SDK/wiki)
+        - Learn more about the internal workings of the CRIPT Python SDK
+    - [CRIPT Python SDK Discussions Tab](https://github.com/C-Accel-CRIPT/Python-SDK/discussions)
+        - Communicate with the CRIPT Python SDK team
+    - [CRIPT Python SDK Contributing Guidelines](https://github.com/C-Accel-CRIPT/Python-SDK/blob/develop/CONTRIBUTING.md)
+        - Learn how to contribute to the CRIPT Python SDK open-source project
+    - [CRIPT Python SDK Contributors](https://github.com/C-Accel-CRIPT/Python-SDK/blob/develop/CONTRIBUTORS.md)
diff --git a/docs/nodes/primary_nodes/base_node.md b/docs/nodes/primary_nodes/base_node.md
new file mode 100644
index 000000000..caf38df1e
--- /dev/null
+++ b/docs/nodes/primary_nodes/base_node.md
@@ -0,0 +1 @@
+# Base node
diff --git a/docs/nodes/primary_nodes/collection.md b/docs/nodes/primary_nodes/collection.md
new file mode 100644
index 000000000..b9bfb24e0
--- /dev/null
+++ b/docs/nodes/primary_nodes/collection.md
@@ -0,0 +1 @@
+::: cript.nodes.primary_nodes.collection
diff --git a/docs/nodes/primary_nodes/computation.md b/docs/nodes/primary_nodes/computation.md
new file mode 100644
index 000000000..3e500dd53
--- /dev/null
+++ b/docs/nodes/primary_nodes/computation.md
@@ -0,0 +1 @@
+::: cript.nodes.primary_nodes.computation
diff --git a/docs/nodes/primary_nodes/computation_process.md b/docs/nodes/primary_nodes/computation_process.md
new file mode 100644
index 000000000..c9b594fd7
--- /dev/null
+++ b/docs/nodes/primary_nodes/computation_process.md
@@ -0,0 +1 @@
+::: cript.nodes.primary_nodes.computation_process
diff --git a/docs/nodes/primary_nodes/data.md b/docs/nodes/primary_nodes/data.md
new file mode 100644
index 000000000..76a48efb6
--- /dev/null
+++ b/docs/nodes/primary_nodes/data.md
@@ -0,0 +1 @@
+::: cript.nodes.primary_nodes.data
diff --git a/docs/nodes/primary_nodes/experiment.md b/docs/nodes/primary_nodes/experiment.md
new file mode 100644
index 000000000..96f684344
--- /dev/null
+++ b/docs/nodes/primary_nodes/experiment.md
@@ -0,0 +1 @@
+::: cript.nodes.primary_nodes.experiment
diff --git a/docs/nodes/primary_nodes/inventory.md b/docs/nodes/primary_nodes/inventory.md
new file mode 100644
index 000000000..fdd1e309d
--- /dev/null
+++ b/docs/nodes/primary_nodes/inventory.md
@@ -0,0 +1 @@
+::: cript.nodes.primary_nodes.inventory
diff --git a/docs/nodes/primary_nodes/material.md b/docs/nodes/primary_nodes/material.md
new file mode 100644
index 000000000..fb0417719
--- /dev/null
+++ b/docs/nodes/primary_nodes/material.md
@@ -0,0 +1 @@
+::: cript.nodes.primary_nodes.material
diff --git a/docs/nodes/primary_nodes/process.md
b/docs/nodes/primary_nodes/process.md new file mode 100644 index 000000000..1fb86b54a --- /dev/null +++ b/docs/nodes/primary_nodes/process.md @@ -0,0 +1 @@ +::: cript.nodes.primary_nodes.process diff --git a/docs/nodes/primary_nodes/project.md b/docs/nodes/primary_nodes/project.md new file mode 100644 index 000000000..3aaa85b06 --- /dev/null +++ b/docs/nodes/primary_nodes/project.md @@ -0,0 +1 @@ +::: cript.nodes.primary_nodes.project diff --git a/docs/nodes/primary_nodes/reference.md b/docs/nodes/primary_nodes/reference.md new file mode 100644 index 000000000..dc4fe1fad --- /dev/null +++ b/docs/nodes/primary_nodes/reference.md @@ -0,0 +1 @@ +::: cript.nodes.primary_nodes.Reference diff --git a/docs/nodes/primary_nodes/software.md b/docs/nodes/primary_nodes/software.md new file mode 100644 index 000000000..603bcc50b --- /dev/null +++ b/docs/nodes/primary_nodes/software.md @@ -0,0 +1 @@ +::: cript.Software diff --git a/docs/nodes/subobjects/algorithm.md b/docs/nodes/subobjects/algorithm.md new file mode 100644 index 000000000..794860b48 --- /dev/null +++ b/docs/nodes/subobjects/algorithm.md @@ -0,0 +1 @@ +::: cript.nodes.subobjects.Algorithm diff --git a/docs/nodes/subobjects/citation.md b/docs/nodes/subobjects/citation.md new file mode 100644 index 000000000..7e5b5d522 --- /dev/null +++ b/docs/nodes/subobjects/citation.md @@ -0,0 +1 @@ +::: cript.nodes.subobjects.citation diff --git a/docs/nodes/subobjects/computational_forcefield.md b/docs/nodes/subobjects/computational_forcefield.md new file mode 100644 index 000000000..3896b9ad9 --- /dev/null +++ b/docs/nodes/subobjects/computational_forcefield.md @@ -0,0 +1 @@ +::: cript.nodes.subobjects.computational_forcefield diff --git a/docs/nodes/subobjects/condition.md b/docs/nodes/subobjects/condition.md new file mode 100644 index 000000000..2d1e05143 --- /dev/null +++ b/docs/nodes/subobjects/condition.md @@ -0,0 +1 @@ +::: cript.nodes.subobjects.condition diff --git a/docs/nodes/subobjects/equipment.md b/docs/nodes/subobjects/equipment.md new file mode 100644 index 000000000..662eaeba3 --- /dev/null +++ b/docs/nodes/subobjects/equipment.md @@ -0,0 +1 @@ +::: cript.nodes.subobjects.equipment diff --git a/docs/nodes/subobjects/identifier.md b/docs/nodes/subobjects/identifier.md new file mode 100644 index 000000000..8ebe64b88 --- /dev/null +++ b/docs/nodes/subobjects/identifier.md @@ -0,0 +1 @@ +# Identifier Subobject diff --git a/docs/nodes/subobjects/ingredient.md b/docs/nodes/subobjects/ingredient.md new file mode 100644 index 000000000..13ae0cd33 --- /dev/null +++ b/docs/nodes/subobjects/ingredient.md @@ -0,0 +1 @@ +::: cript.nodes.subobjects.ingredient diff --git a/docs/nodes/subobjects/parameter.md b/docs/nodes/subobjects/parameter.md new file mode 100644 index 000000000..f09929fad --- /dev/null +++ b/docs/nodes/subobjects/parameter.md @@ -0,0 +1 @@ +::: cript.nodes.subobjects.parameter diff --git a/docs/nodes/subobjects/property.md b/docs/nodes/subobjects/property.md new file mode 100644 index 000000000..1fba3646b --- /dev/null +++ b/docs/nodes/subobjects/property.md @@ -0,0 +1 @@ +::: cript.nodes.subobjects.property diff --git a/docs/nodes/subobjects/quantity.md b/docs/nodes/subobjects/quantity.md new file mode 100644 index 000000000..f42fe2ee4 --- /dev/null +++ b/docs/nodes/subobjects/quantity.md @@ -0,0 +1 @@ +::: cript.nodes.subobjects.quantity diff --git a/docs/nodes/subobjects/software_configuration.md b/docs/nodes/subobjects/software_configuration.md new file mode 100644 index 000000000..e6148efd3 --- /dev/null +++ 
b/docs/nodes/subobjects/software_configuration.md
@@ -0,0 +1 @@
+::: cript.nodes.subobjects.software_configuration
diff --git a/docs/nodes/supporting_nodes/file.md b/docs/nodes/supporting_nodes/file.md
new file mode 100644
index 000000000..5a2e74555
--- /dev/null
+++ b/docs/nodes/supporting_nodes/file.md
@@ -0,0 +1 @@
+::: cript.nodes.supporting_nodes.file
diff --git a/docs/nodes/supporting_nodes/group.md b/docs/nodes/supporting_nodes/group.md
new file mode 100644
index 000000000..1d42e1bc4
--- /dev/null
+++ b/docs/nodes/supporting_nodes/group.md
@@ -0,0 +1 @@
+# Group Node
diff --git a/docs/nodes/supporting_nodes/user.md b/docs/nodes/supporting_nodes/user.md
new file mode 100644
index 000000000..f875c44aa
--- /dev/null
+++ b/docs/nodes/supporting_nodes/user.md
@@ -0,0 +1 @@
+::: cript.nodes.supporting_nodes.user
diff --git a/docs/tutorial/cript_installation_guide.md b/docs/tutorial/cript_installation_guide.md
new file mode 100644
index 000000000..9608d4f2a
--- /dev/null
+++ b/docs/tutorial/cript_installation_guide.md
@@ -0,0 +1,55 @@
+# How to Install CRIPT
+
+!!! abstract
+
+    This page will give you a thorough guide on how to install the
+    [CRIPT Python SDK](https://pypi.org/project/cript/) on your system.
+
+## Steps
+
+1. Install [Python 3.7+](https://www.python.org/downloads/)
+2. Create a virtual environment
+
+    > It is best practice to create a dedicated [Python virtual environment](https://docs.python.org/3/library/venv.html) for each Python project
+
+    === ":fontawesome-brands-windows: **_Windows:_**"
+        ```bash
+        python -m venv .\venv
+        ```
+
+    === ":fontawesome-brands-apple: **_Mac_** & :fontawesome-brands-linux: **_Linux:_**"
+        ```bash
+        python3 -m venv ./venv
+        ```
+
+3. Activate your virtual environment
+
+    === ":fontawesome-brands-windows: **_Windows:_**"
+        ```bash
+        .\venv\Scripts\activate
+        ```
+
+    === ":fontawesome-brands-apple: **_Mac_** & :fontawesome-brands-linux: **_Linux:_**"
+        ```bash
+        source venv/bin/activate
+        ```
+
+4. Install [CRIPT from Python Package Index (PyPI)](https://pypi.org/project/cript/)
+    ```bash
+    pip install cript
+    ```
+5. Create your CRIPT Script!
+
+
+??? info "Install Package From our [GitHub](https://github.com/C-Accel-CRIPT/Python-SDK)"
+    Please note that it is also possible to install this package from our
+    [GitHub](https://github.com/C-Accel-CRIPT/Python-SDK).
+
+    Formula: `pip install git+[repository URL]@[branch or tag]`
+
+    Install from [Main](https://github.com/C-Accel-CRIPT/Python-SDK/tree/main):
+    `pip install git+https://github.com/C-Accel-CRIPT/Python-SDK@main`
+
+    or, to download the latest [development code](https://github.com/C-Accel-CRIPT/Python-SDK/tree/develop):
+    `pip install git+https://github.com/C-Accel-CRIPT/Python-SDK@develop`
+
diff --git a/docs/tutorial/how_to_get_api_token.md b/docs/tutorial/how_to_get_api_token.md
new file mode 100644
index 000000000..7e38f156a
--- /dev/null
+++ b/docs/tutorial/how_to_get_api_token.md
@@ -0,0 +1,43 @@
+!!! abstract
+
+    This page shows the steps to acquire an API Token to connect to the [CRIPT platform](https://criptapp.org)
+
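Once you have the token, it is typically read from environment variables and handed to `cript.API` rather than pasted into your source code. The following is an editor's sketch based on the `cript.API` constructor shown later in this patch; the environment-variable names `CRIPT_HOST`, `CRIPT_TOKEN`, and `CRIPT_STORAGE_TOKEN` come from its docstring, and the sketch is illustrative rather than part of the original page.

```python
# A minimal sketch of where the token ends up once you have copied it.
# Exporting CRIPT_HOST, CRIPT_TOKEN, and CRIPT_STORAGE_TOKEN in your shell
# (names per the cript.API docstring in this patch) keeps secrets out of code.
import os

import cript

with cript.API(
    host=os.getenv("CRIPT_HOST", "https://api.criptapp.org/"),
    api_token=os.getenv("CRIPT_TOKEN"),
    storage_token=os.getenv("CRIPT_STORAGE_TOKEN"),
) as api:
    print(api)  # prints the connected host via API.__str__
```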
+ +The token is needed because we need to authenticate the user before saving any of their data + +!!! Warning "Token Security" + It is **highly** recommended that you store your API tokens in a safe location and read it into your code + Hard-coding API tokens directly into the code can pose security risks, + as the token might be exposed if the code is shared or stored in a version control system. + + Anyone that has access to your tokens can impersonate you on the [CRIPT platform](https://criptapp.org) + +Screenshot of CRIPT security page where API token is found + + + [Security Settings](https://criptapp.org/security/) + under the profile icon dropdown + + + +To get your token: + +1. please visit your [Security Settings](https://criptapp.org/security/) under the profile + icon dropdown on the top right +2. Click on the **copy** button next to the API Token to copy it to clipboard +3. Now you can paste it into the `API Token` field + +Example: + + + + +```yaml +API Token: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c + +Storage Token: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gU21pdGgiLCJpYXQiOjE1MTYyMzkwMjJ9.Q_w2AVguPRU2KskCXwR7ZHl09TQXEntfEA8Jj2_Jyew +``` + + + diff --git a/docs/utility_functions.md b/docs/utility_functions.md new file mode 100644 index 000000000..2f9afbcf0 --- /dev/null +++ b/docs/utility_functions.md @@ -0,0 +1 @@ +::: cript.nodes.util diff --git a/mkdocs.yml b/mkdocs.yml new file mode 100644 index 000000000..1f5a3e13b --- /dev/null +++ b/mkdocs.yml @@ -0,0 +1,135 @@ +site_name: CRIPT Python SDK + +repo_url: https://github.com/C-Accel-CRIPT/Python-SDK +repo_name: C-Accel-CRIPT/Python-SDK + +nav: + - Home: index.md + - Tutorial: + - CRIPT Installation Guide: tutorial/cript_installation_guide.md + - CRIPT API Token: tutorial/how_to_get_api_token.md + - Example Code Walkthrough: + - Synthesis: examples/synthesis.md + - Simulation: examples/simulation.md + - API Client: + - API: api/api.md + - Search Modes: api/search_modes.md + - Paginator: api/paginator.md + - Controlled Vocabulary Categories: api/controlled_vocabulary_categories.md + - Primary Nodes: + - Collection: nodes/primary_nodes/collection.md + - Computation: nodes/primary_nodes/computation.md + - Computation Process: nodes/primary_nodes/computation_process.md + - Data: nodes/primary_nodes/data.md + - Experiment: nodes/primary_nodes/experiment.md + - Inventory: nodes/primary_nodes/inventory.md + - Material: nodes/primary_nodes/material.md + - Project: nodes/primary_nodes/project.md + - Process: nodes/primary_nodes/process.md + - Reference: nodes/primary_nodes/reference.md + - Software: nodes/primary_nodes/software.md + - Sub-objects: + - Algorithm: nodes/subobjects/algorithm.md + - Citation: nodes/subobjects/citation.md + - Computational Forcefield: nodes/subobjects/computational_forcefield.md + - Condition: nodes/subobjects/condition.md + - Equipment: nodes/subobjects/equipment.md + # - Identifier: nodes/subobjects/identifier.md + - Ingredient: nodes/subobjects/ingredient.md + - Parameter: nodes/subobjects/parameter.md + - Property: nodes/subobjects/property.md + - Quantity: nodes/subobjects/quantity.md + - Software Configuration: nodes/subobjects/software_configuration.md + - Supporting Nodes: + - User: nodes/supporting_nodes/user.md + # - Group: nodes/supporting_nodes/group.md + - File: nodes/supporting_nodes/file.md + - Utility Functions: utility_functions.md + 
- Exceptions: + - API Exceptions: exceptions/api_exceptions.md + - Node Exceptions: exceptions/node_exceptions.md + - FAQ: faq.md + - Internal Wiki Documentation: https://github.com/C-Accel-CRIPT/Python-SDK/wiki + - CRIPT Python SDK Discussions: https://github.com/C-Accel-CRIPT/Python-SDK/discussions + +theme: + name: material + # below is the favicon image and documentation logo + logo: ./images/CRIPT_full_logo_colored_transparent.png + favicon: ./images/favicon.ico + icon: + admonition: + alert: octicons/alert-16 + features: + - content.code.copy + - navigation.path + - nagivation.tracking + - navigation.footer + + palette: + # Palette toggle for light mode + - media: "(prefers-color-scheme: light)" + scheme: default + primary: deep purple + accent: deep purple + toggle: + icon: material/brightness-7 + name: Switch to dark mode + # Palette toggle for dark mode + - media: "(prefers-color-scheme: dark)" + scheme: slate + primary: deep purple + accent: deep purple + toggle: + icon: material/brightness-4 + name: Switch to light mode + +# This links the CRIPT logo to the CRIPT homepage +extra: + homepage: https://criptapp.org +# social: +# - icon: fontawesome/brands/twitter +# link: https://twitter.com/squidfunk +# name: squidfunk on Twitter +copyright: © 2023 MIT | All Rights Reserved + +extra_css: + - extra.css + +plugins: + - search + - mkdocstrings: + default_handler: python + handlers: + python: + paths: [src, docs] + options: + show_bases: true + show_source: true + docstring_style: numpy + watch: + - src/ + +markdown_extensions: + - toc: + baselevel: 2 + permalink: True + - attr_list + - md_in_html + - admonition + - pymdownx.details + - pymdownx.superfences + - pymdownx.inlinehilite + - pymdownx.snippets + - pymdownx.critic + - pymdownx.caret + - pymdownx.keys + - pymdownx.mark + - pymdownx.tilde + - pymdownx.tabbed: + alternate_style: true + - pymdownx.highlight: + anchor_linenums: true + - pymdownx.emoji: + emoji_index: !!python/name:materialx.emoji.twemoji + emoji_generator: !!python/name:materialx.emoji.to_svg diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..96c8f431e --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,26 @@ +[build-system] +requires = ["setuptools>=60", "wheel"] +build-backend = "setuptools.build_meta" + +[tool.black] +line-length = 250 +include = '\.pyi?$' +exclude = ''' +/( + \.git + | \.hg + | \.ipynb + | \.mypy_cache + | \.pytest_cache + | \.tox + | \.venv + | _build + | buck-out + | build + | dist + | miniconda +)/ +''' + +[tool.ruff] +line-length = 250 diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 000000000..d23e4db52 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,4 @@ +requests==2.31.0 +jsonschema==4.18.4 +boto3==1.28.17 +beartype==0.14.1 diff --git a/requirements_dev.txt b/requirements_dev.txt new file mode 100644 index 000000000..b71f79050 --- /dev/null +++ b/requirements_dev.txt @@ -0,0 +1,10 @@ +-r requirements.txt +black==23.7.0 +mypy==1.4.1 +pytest==7.4.0 +pytest-cov==4.1.0 +coverage==7.2.7 +types-jsonschema==4.17.0.9 +types-requests==2.31.0.1 +types-boto3==1.0.2 +deepdiff==6.3.1 diff --git a/requirements_docs.txt b/requirements_docs.txt new file mode 100644 index 000000000..7174ca504 --- /dev/null +++ b/requirements_docs.txt @@ -0,0 +1,5 @@ +mkdocs==1.5.1 +mkdocs-material==9.1.21 +mkdocstrings[python]==0.22.0 +pymdown-extensions==10.1 +jupytext==1.15.0 diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 000000000..aa8906c52 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,32 @@ +[metadata] 
+name = cript +version = 2.0.0 +description = CRIPT Python SDK +long_description = file: README.md +long_description_content_type = text/markdown +author = CRIPT Development Team +url = https://github.com/C-Accel-CRIPT/Python-SDK +license = MIT +license_files = LICENSE.md +platforms = any +classifiers = + Development Status :: 3 - Alpha + Topic :: Scientific/Engineering + Programming Language :: Python :: 3 + Programming Language :: Python :: 3 :: Only + Programming Language :: Python :: 3.7 + +[options] +package_dir = + =src +packages = find: +python_requires = >=3.7 +include_package_data = True +install_requires = + requests==2.31.0 + jsonschema==4.17.3 + beartype==0.14.1 + boto3==1.26.151 + +[options.packages.find] +where = src \ No newline at end of file diff --git a/setup.py b/setup.py new file mode 100644 index 000000000..7f1a1763c --- /dev/null +++ b/setup.py @@ -0,0 +1,4 @@ +from setuptools import setup + +if __name__ == "__main__": + setup() diff --git a/src/cript/__init__.py b/src/cript/__init__.py new file mode 100644 index 000000000..e4d49922d --- /dev/null +++ b/src/cript/__init__.py @@ -0,0 +1,40 @@ +# trunk-ignore-all(ruff/F401) +# trunk-ignore-all(ruff/E402) + +# TODO fix beartype warning for real +from warnings import filterwarnings + +from beartype.roar import BeartypeDecorHintPep585DeprecationWarning + +filterwarnings("ignore", category=BeartypeDecorHintPep585DeprecationWarning) + +from cript.api import API, SearchModes, VocabCategories +from cript.exceptions import CRIPTException +from cript.nodes import ( + Algorithm, + Citation, + Collection, + Computation, + ComputationalForcefield, + ComputationProcess, + Condition, + Data, + Equipment, + Experiment, + File, + Ingredient, + Inventory, + Material, + NodeEncoder, + Parameter, + Process, + Project, + Property, + Quantity, + Reference, + Software, + SoftwareConfiguration, + User, + add_orphaned_nodes_to_project, + load_nodes_from_json, +) diff --git a/src/cript/api/__init__.py b/src/cript/api/__init__.py new file mode 100644 index 000000000..fb3229f5c --- /dev/null +++ b/src/cript/api/__init__.py @@ -0,0 +1,5 @@ +# trunk-ignore-all(ruff/F401) + +from cript.api.api import API +from cript.api.valid_search_modes import SearchModes +from cript.api.vocabulary_categories import VocabCategories diff --git a/src/cript/api/api.py b/src/cript/api/api.py new file mode 100644 index 000000000..1beb49b6a --- /dev/null +++ b/src/cript/api/api.py @@ -0,0 +1,918 @@ +import copy +import json +import logging +import os +import uuid +import warnings +from pathlib import Path +from typing import Any, Dict, List, Optional, Union + +import boto3 +import jsonschema +import requests +from beartype import beartype + +from cript.api.exceptions import ( + APIError, + CRIPTAPIRequiredError, + CRIPTAPISaveError, + CRIPTConnectionError, + InvalidHostError, + InvalidVocabulary, +) +from cript.api.paginator import Paginator +from cript.api.utils.get_host_token import resolve_host_and_token +from cript.api.utils.helper_functions import _get_node_type_from_json +from cript.api.utils.save_helper import ( + _fix_node_save, + _get_uuid_from_error_message, + _identify_suppress_attributes, + _InternalSaveValues, +) +from cript.api.utils.web_file_downloader import download_file_from_url +from cript.api.valid_search_modes import SearchModes +from cript.api.vocabulary_categories import VocabCategories +from cript.nodes.exceptions import CRIPTNodeSchemaError +from cript.nodes.primary_nodes.project import Project + +# Do not use this directly! That includes devs. 
+# Use the `_get_global_cached_api for access. +_global_cached_api = None + + +def _get_global_cached_api(): + """ + Read-Only access to the globally cached API object. + Raises an exception if no global API object is cached yet. + """ + if _global_cached_api is None: + raise CRIPTAPIRequiredError() + return _global_cached_api + + +class API: + """ + ## Definition + API Client class to communicate with the CRIPT API + + Attributes + ---------- + verbose : bool + A boolean flag that controls whether verbose logging is enabled or not. + + When `verbose` is set to `True`, the class will provide additional detailed logging + to the terminal. This can be useful for debugging and understanding the internal + workings of the class. + + When `verbose` is set to `False`, the class will only provide essential and concise + logging information, making the terminal output less cluttered and more user-friendly. + + ```python + # turn off the terminal logs + api.verbose = False + ``` + """ + + # dictates whether the user wants to see terminal log statements or not + verbose: bool = True + + _host: str = "" + _api_token: str = "" + _storage_token: str = "" + _http_headers: dict = {} + _vocabulary: dict = {} + _db_schema: dict = {} + _api_handle: str = "api" + _api_version: str = "v1" + + # trunk-ignore-begin(cspell) + # AWS S3 constants + _REGION_NAME: str = "us-east-1" + _IDENTITY_POOL_ID: str = "us-east-1:9426df38-994a-4191-86ce-3cb0ce8ac84d" + _COGNITO_LOGIN_PROVIDER: str = "cognito-idp.us-east-1.amazonaws.com/us-east-1_SZGBXPl2j" + _BUCKET_NAME: str = "cript-user-data" + _BUCKET_DIRECTORY_NAME: str = "python_sdk_files" + _internal_s3_client: Any = None # type: ignore + # trunk-ignore-end(cspell) + + @beartype + def __init__(self, host: Union[str, None] = None, api_token: Union[str, None] = None, storage_token: Union[str, None] = None, config_file_path: Union[str, Path] = ""): + """ + Initialize CRIPT API client with host and token. + Additionally, you can use a config.json file and specify the file path. + + !!! note "api client context manager" + It is necessary to use a `with` context manager for the API + + Examples + -------- + ### Create API client with host and token + ```Python + with cript.API( + host="https://api.criptapp.org/", + api_token="my api token", + storage_token="my storage token"), + ) as api: + # node creation, api.save(), etc. + ``` + + --- + + ### Creating API Client + !!! Warning "Token Security" + It is **highly** recommended that you store your API tokens in a safe location and read it into your code + Hard-coding API tokens directly into the code can pose security risks, + as the token might be exposed if the code is shared or stored in a version control system. + Anyone that has access to your tokens can impersonate you on the CRIPT platform + + ### Create API Client with + [Environment Variables](https://www.freecodecamp.org/news/python-env-vars-how-to-get-an-environment-variable-in-python/) + Another great way to keep sensitive information secure is by using + [environment variables](https://www.freecodecamp.org/news/python-env-vars-how-to-get-an-environment-variable-in-python/). + Sensitive information can be securely stored in environment variables and loaded into the code using + [os.getenv()](https://docs.python.org/3/library/os.html#os.getenv). 
+ + #### Example + + ```python + import os + + # securely load sensitive data into the script + cript_host = os.getenv("cript_host") + cript_api_token = os.getenv("cript_api_token") + cript_storage_token = os.getenv("cript_storage_token") + + with cript.API(host=cript_host, api_token=cript_api_token, storage_token=cript_storage_token) as api: + # write your script + pass + ``` + + ### Create API Client with None + Alternatively you can configure your system to have an environment variable of + `CRIPT_TOKEN` for the API token and `CRIPT_STORAGE_TOKEN` for the storage token, then + initialize `cript.API` `api_token` and `storage_token` with `None`. + + The CRIPT Python SDK will try to read the API Token and Storage token from your system's environment variables. + + ```python + with cript.API(host=cript_host, api_token=None, storage_token=None) as api: + # write your script + pass + ``` + + ### Create API client with config.json + `config.json` + ```json + { + "host": "https://api.criptapp.org/", + "api_token": "I am API token", + "storage_token": "I am storage token" + } + ``` + + `my_script.py` + ```python + from pathlib import Path + + # create a file path object of where the config file is + config_file_path = Path(__file__) / Path('./config.json') + + with cript.API(config_file_path=config_file_path) as api: + # node creation, api.save(), etc. + ``` + + Parameters + ---------- + host : str, None + CRIPT host for the Python SDK to connect to such as https://api.criptapp.org/` + This host address is the same address used to login to cript website. + If `None` is specified, the host is inferred from the environment variable `CRIPT_HOST`. + api_token : str, None + CRIPT API Token used to connect to CRIPT and upload all data with the exception to file upload that needs + a different token. + You can find your personal token on the cript website at User > Security Settings. + The user icon is in the top right. + If `None` is specified, the token is inferred from the environment variable `CRIPT_TOKEN`. + storage_token: str + This token is used to upload local files to CRIPT cloud storage when needed + config_file_path: str + the file path to the config.json file where the token and host can be found + + + Notes + ----- + * if `host=None` and `token=None` + then the Python SDK will grab the host from the users environment variable of `"CRIPT_HOST"` + and `"CRIPT_TOKEN"` + + Warns + ----- + UserWarning + If `host` is using "http" it gives the user a warning that HTTP is insecure and the user should use HTTPS + + Raises + ------ + CRIPTConnectionError + If it cannot connect to CRIPT with the provided host and token a CRIPTConnectionError is thrown. 
+ + Returns + ------- + None + Instantiate a new CRIPT API object + """ + + # if there is a config.json file or any of the parameters are None, then get the variables from file or env vars + if config_file_path or (host is None or api_token is None or storage_token is None): + authentication_dict: Dict[str, str] = resolve_host_and_token(host, api_token=api_token, storage_token=storage_token, config_file_path=config_file_path) + + host = authentication_dict["host"] + api_token = authentication_dict["api_token"] + storage_token = authentication_dict["storage_token"] + + self._host = self._prepare_host(host=host) # type: ignore + self._api_token = api_token # type: ignore + self._storage_token = storage_token # type: ignore + + # add Bearer to token for HTTP requests + self._http_headers = {"Authorization": f"Bearer {self._api_token}", "Content-Type": "application/json"} + + # check that api can connect to CRIPT with host and token + self._check_initial_host_connection() + + self._get_db_schema() + + def __str__(self) -> str: + """ + States the host of the CRIPT API client + + Returns + ------- + str + """ + return f"CRIPT API Client - Host URL: '{self.host}'" + + @beartype + def _prepare_host(self, host: str) -> str: + # strip ending slash to make host always uniform + host = host.rstrip("/") + host = f"{host}/{self._api_handle}/{self._api_version}" + + # if host is using unsafe "http://" then give a warning + if host.startswith("http://"): + warnings.warn("HTTP is an unsafe protocol please consider using HTTPS.") + + if not host.startswith("http"): + raise InvalidHostError() + + return host + + # Use a property to ensure delayed init of s3_client + @property + def _s3_client(self) -> boto3.client: # type: ignore + """ + creates or returns a fully authenticated and ready s3 client + + Returns + ------- + s3_client: boto3.client + fully prepared and authenticated s3 client ready to be used throughout the script + """ + + if self._internal_s3_client is None: + auth = boto3.client("cognito-identity", region_name=self._REGION_NAME) + identity_id = auth.get_id(IdentityPoolId=self._IDENTITY_POOL_ID, Logins={self._COGNITO_LOGIN_PROVIDER: self._storage_token}) + # TODO remove this temporary fix to the token, by getting is from back end. + aws_token = self._storage_token + + aws_credentials = auth.get_credentials_for_identity(IdentityId=identity_id["IdentityId"], Logins={self._COGNITO_LOGIN_PROVIDER: aws_token}) + aws_credentials = aws_credentials["Credentials"] + s3_client = boto3.client( + "s3", + aws_access_key_id=aws_credentials["AccessKeyId"], + aws_secret_access_key=aws_credentials["SecretKey"], + aws_session_token=aws_credentials["SessionToken"], + ) + self._internal_s3_client = s3_client + return self._internal_s3_client + + def __enter__(self): + self.connect() + return self + + @beartype + def __exit__(self, type, value, traceback): + self.disconnect() + + def connect(self): + """ + Connect this API globally as the current active access point. + It is not necessary to call this function manually if a context manager is used. + A context manager is preferred where possible. + Jupyter notebooks are a use case where this connection can be handled manually. + If this function is called manually, the `API.disconnect` function has to be called later. + + For manual connection: nested API object are discouraged. 
+ """ + # Store the last active global API (might be None) + global _global_cached_api + self._previous_global_cached_api = copy.copy(_global_cached_api) + _global_cached_api = self + return self + + def disconnect(self): + """ + Disconnect this API from the active access point. + It is not necessary to call this function manually if a context manager is used. + A context manager is preferred where possible. + Jupyter notebooks are a use case where this connection can be handled manually. + This function has to be called manually if the `API.connect` function has to be called before. + + For manual connection: nested API object are discouraged. + """ + # Restore the previously active global API (might be None) + global _global_cached_api + _global_cached_api = self._previous_global_cached_api + + @property + def schema(self): + """ + Access the CRIPT Database Schema that is associated with this API connection. + The CRIPT Database Schema is used to validate a node's JSON so that it is compatible with the CRIPT API. + """ + return self._db_schema + + @property + def host(self): + """ + Read only access to the currently connected host. + + The term "host" designates the specific CRIPT instance to which you intend to upload your data. + + For most users, the host will be `criptapp.org` + + ```yaml + host: criptapp.org + ``` + + Examples + -------- + ```python + print(cript_api.host) + ``` + Output + ```Python + https://api.criptapp.org/api/v1 + ``` + """ + return self._host + + def _check_initial_host_connection(self) -> None: + """ + tries to create a connection with host and if the host does not respond or is invalid it raises an error + + Raises + ------- + CRIPTConnectionError + raised when the host does not give the expected response + + Returns + ------- + None + """ + try: + pass + except Exception as exc: + raise CRIPTConnectionError(self.host, self._api_token) from exc + + def _get_vocab(self) -> dict: + """ + gets the entire CRIPT controlled vocabulary and stores it in _vocabulary + + 1. loops through all controlled vocabulary categories + 1. if the category already exists in the controlled vocabulary then skip that category and continue + 1. if the category does not exist in the `_vocabulary` dict, + then request it from the API and append it to the `_vocabulary` dict + 1. at the end the `_vocabulary` should have all the controlled vocabulary and that will be returned + + Examples + -------- + The vocabulary looks like this + ```json + {'algorithm_key': + [ + { + 'description': "Velocity-Verlet integration algorithm. 
Parameters: 'integration_timestep'.", + 'name': 'velocity_verlet' + }, + } + ``` + """ + + # loop through all vocabulary categories and make a request to each vocabulary category + # and put them all inside of self._vocab with the keys being the vocab category name + for category in VocabCategories: + if category in self._vocabulary: + continue + + self._vocabulary[category.value] = self.get_vocab_by_category(category) + + return self._vocabulary + + @beartype + def get_vocab_by_category(self, category: VocabCategories) -> List[dict]: + """ + get the CRIPT controlled vocabulary by category + + Parameters + ---------- + category: str + category of + + Returns + ------- + List[dict] + list of JSON containing the controlled vocabulary + """ + + # check if the vocabulary category is already cached + if category.value in self._vocabulary: + return self._vocabulary[category.value] + + # if vocabulary category is not in cache, then get it from API and cache it + response = requests.get(f"{self.host}/cv/{category.value}/").json() + + if response["code"] != 200: + # TODO give a better CRIPT custom Exception + raise Exception(f"while getting controlled vocabulary from CRIPT for {category}, " f"the API responded with http {response} ") + + # add to cache + self._vocabulary[category.value] = response["data"] + + return self._vocabulary[category.value] + + @beartype + def _is_vocab_valid(self, vocab_category: VocabCategories, vocab_word: str) -> bool: + """ + checks if the vocabulary is valid within the CRIPT controlled vocabulary. + Either returns True or InvalidVocabulary Exception + + 1. if the vocabulary is custom (starts with "+") + then it is automatically valid + 2. if vocabulary is not custom, then it is checked against its category + if the word cannot be found in the category then it returns False + + Parameters + ---------- + vocab_category: VocabCategories + ControlledVocabularyCategories enums + vocab_word: str + the vocabulary word e.g. "CAS", "SMILES", "BigSmiles", "+my_custom_key" + + Returns + ------- + a boolean of if the vocabulary is valid + + Raises + ------ + InvalidVocabulary + If the vocabulary is invalid then the error gets raised + """ + + # check if vocab is custom + # This is deactivated currently, no custom vocab allowed. + if vocab_word.startswith("+"): + return True + + # get the entire vocabulary + controlled_vocabulary = self._get_vocab() + # get just the category needed + controlled_vocabulary = controlled_vocabulary[vocab_category.value] + + # TODO this can be faster with a dict of dicts that can do o(1) look up + # looping through an unsorted list is an O(n) look up which is slow + # loop through the list + for vocab_dict in controlled_vocabulary: + # check the name exists within the dict + if vocab_dict.get("name") == vocab_word: + return True + + raise InvalidVocabulary(vocab=vocab_word, possible_vocab=list(controlled_vocabulary)) + + def _get_db_schema(self) -> dict: + """ + Sends a GET request to CRIPT to get the database schema and returns it. + The database schema can be used for validating the JSON request + before submitting it to CRIPT. + + 1. checks if the db schema is already set + * if already exists then it skips fetching it from the API and just returns what it already has + 2. 
if db schema has not been set yet, then it fetches it from the API + * after getting it from the API it saves it in the `_schema` class variable, + so it can be easily and efficiently gotten next time + """ + + # check if db schema is already saved + if bool(self._db_schema): + return self._db_schema + + # fetch db_schema from API + else: + # fetch db schema, get the JSON body of it, and get the data of that JSON + response = requests.get(url=f"{self.host}/schema/").json() + + if response["code"] != 200: + raise APIError(api_error=response.json()) + + # get the data from the API JSON response + self._db_schema = response["data"] + return self._db_schema + + @beartype + def _is_node_schema_valid(self, node_json: str, is_patch: bool = False) -> bool: + """ + checks a node JSON schema against the db schema to return if it is valid or not. + + 1. get db schema + 1. convert node_json str to dict + 1. take out the node type from the dict + 1. "node": ["material"] + 1. use the node type from dict to tell the db schema which node schema to validate against + 1. Manipulates the string to be title case to work with db schema + + Parameters + ---------- + node_json: str + a node in JSON form string + is_patch: bool + a boolean flag checking if it needs to validate against `NodePost` or `NodePatch` + + Notes + ----- + This function does not take into consideration vocabulary validation. + For vocabulary validation please check `is_vocab_valid` + + Raises + ------ + CRIPTNodeSchemaError + in case a node is invalid + + Returns + ------- + bool + whether the node JSON is valid or not + """ + + db_schema = self._get_db_schema() + + node_type: str = _get_node_type_from_json(node_json=node_json) + + node_dict = json.loads(node_json) + + if self.verbose: + # logging out info to the terminal for the user feedback + # (improve UX because the program is currently slow) + logging.basicConfig(format="%(levelname)s: %(message)s", level=logging.INFO) + logging.info(f"Validating {node_type} graph...") + + # set the schema to test against http POST or PATCH of DB Schema + schema_http_method: str + + if is_patch: + schema_http_method = "Patch" + else: + schema_http_method = "Post" + + # set which node you are using schema validation for + db_schema["$ref"] = f"#/$defs/{node_type}{schema_http_method}" + + try: + jsonschema.validate(instance=node_dict, schema=db_schema) + except jsonschema.exceptions.ValidationError as error: + raise CRIPTNodeSchemaError(node_type=node_dict["node"], json_schema_validation_error=str(error)) from error + + # if validation goes through without any problems return True + return True + + def save(self, project: Project) -> None: + """ + This method takes a project node, serializes the class into JSON + and then sends the JSON to be saved to the API. + It takes Project node because everything is connected to the Project node, + and it can be used to send either a POST or PATCH request to API + + Parameters + ---------- + project: Project + the Project Node that the user wants to save + + Raises + ------ + CRIPTAPISaveError + If the API responds with anything other than an HTTP of `200`, the API error is displayed to the user + + Returns + ------- + A set of extra saved node UUIDs. + Just sends a `POST` or `Patch` request to the API + """ + try: + self._internal_save(project) + except CRIPTAPISaveError as exc: + if exc.pre_saved_nodes: + for node_uuid in exc.pre_saved_nodes: + # TODO remove all pre-saved nodes by their uuid. 
+ pass + raise exc from exc + + def _internal_save(self, node, save_values: Optional[_InternalSaveValues] = None) -> _InternalSaveValues: + """ + Internal helper function that handles the saving of different nodes (not just project). + + If a "Bad UUID" error happens, we find that node with the UUID and save it first. + Then we recursively call the _internal_save again. + Because it is recursive, this repeats until no "Bad UUID" error happen anymore. + This works, because we keep track of "Bad UUID" handled nodes, and represent them in the JSON only as the UUID. + """ + + if save_values is None: + save_values = _InternalSaveValues() + + # saves all the local files to cloud storage right before saving the Project node + # Ensure that all file nodes have uploaded there payload before actual save. + for file_node in node.find_children({"node": ["File"]}): + file_node.ensure_uploaded(api=self) + + node.validate() + + # Dummy response to have a virtual do-while loop, instead of while loop. + response = {"code": -1} + # TODO remove once get works properly + force_patch = False + + while response["code"] != 200: + # Keep a record of how the state was before the loop + old_save_values = copy.deepcopy(save_values) + # We assemble the JSON to be saved to back end. + # Note how we exclude pre-saved uuid nodes. + json_data = node.get_json(known_uuid=save_values.saved_uuid, suppress_attributes=save_values.suppress_attributes).json + + # This checks if the current node exists on the back end. + # if it does exist we use `patch` if it doesn't `post`. + test_get_response: Dict = requests.get(url=f"{self._host}/{node.node_type_snake_case}/{str(node.uuid)}/", headers=self._http_headers).json() + patch_request = test_get_response["code"] == 200 + + # TODO remove once get works properly + if not patch_request and force_patch: + patch_request = True + force_patch = False + # TODO activate patch validation + # node.validate(is_patch=patch_request) + + # If all that is left is a UUID, we don't need to save it, we can just exit the loop. + if patch_request and len(json.loads(json_data)) == 1: + response = {"code": 200} + break + + if patch_request: + response: Dict = requests.patch(url=f"{self._host}/{node.node_type_snake_case}/{str(node.uuid)}/", headers=self._http_headers, data=json_data).json() # type: ignore + else: + response: Dict = requests.post(url=f"{self._host}/{node.node_type_snake_case}/", headers=self._http_headers, data=json_data).json() # type: ignore + + # If we get an error we may be able to fix, we to handle this extra and save the bad node first. + # Errors with this code, may be fixable + if response["code"] in (400, 409): + returned_save_values = _fix_node_save(self, node, response, save_values) + save_values += returned_save_values + + # Handle errors from patching with too many attributes + if patch_request and response["code"] in (400,): + suppress_attributes = _identify_suppress_attributes(node, response) + new_save_values = _InternalSaveValues(save_values.saved_uuid, suppress_attributes) + save_values += new_save_values + + # It is only worthwhile repeating the attempted save loop if our state has improved. 
+ # i.e., we did something to fix the error that occurred + if not save_values > old_save_values: + # TODO remove once get works properly + if not patch_request and response["code"] == 409 and response["error"].strip().startswith("Duplicate uuid:"): # type: ignore + duplicate_uuid = _get_uuid_from_error_message(response["error"]) # type: ignore + if str(node.uuid) == duplicate_uuid: + force_patch = True + continue + + break + + if response["code"] != 200: + raise CRIPTAPISaveError(api_host_domain=self._host, http_code=response["code"], api_response=response["error"], patch_request=patch_request, pre_saved_nodes=save_values.saved_uuid, json_data=json_data) # type: ignore + + save_values.saved_uuid.add(str(node.uuid)) + return save_values + + def upload_file(self, file_path: Union[Path, str]) -> str: + # trunk-ignore-begin(cspell) + """ + uploads a file to the AWS S3 bucket and returns the object_name of the uploaded file. + The object_name has no expiration time limit and is available forever. + + 1. take a file path of type Path or str to the file on local storage + * see Example for more details + 1. convert the file path to a pathlib object, so it is versatile and + always uniform regardless of whether the user passes in a str or Path object + 1. get the file + 1. rename the file to avoid clashing with or overwriting previously uploaded files + * change file name to `original_name_uuid4.extension` + * `document_42926a201a624fdba0fd6271defc9e88.txt` + 1. upload the file to AWS S3 + 1. get the object_name of the uploaded file and return it + + + Parameters + ---------- + file_path: Union[str, Path] + file path as a str or Path object; a Path object is recommended + + Examples + -------- + ```python + from pathlib import Path + + import cript + + api = cript.API(host, api_token, storage_token) + + # programmatically create the absolute path of your file, so the program always works correctly + my_file_path = (Path(__file__) / Path('../upload_files/my_file.txt')).resolve() + + my_file_object_name = api.upload_file(file_path=my_file_path) + ``` + + Raises + ------ + FileNotFoundError + Raised when the CRIPT Python SDK cannot find the file on your computer because the file does not exist + or the path to it is incorrect; see + [FileNotFoundError](https://docs.python.org/3/library/exceptions.html#FileNotFoundError) + + Returns + ------- + object_name: str + object_name of the uploaded file in AWS S3, to be put into the File node's source attribute + """ + # trunk-ignore-end(cspell) + + # TODO consider using a new variable when converting `file_path` from parameter + # to a Path object with a new type + # convert file path from whatever the user passed in to a pathlib object + file_path = Path(file_path).resolve() + + # get file_name and file_extension from absolute file path + # file_extension includes the dot, e.g. ".txt" + file_name, file_extension = os.path.splitext(os.path.basename(file_path)) + + # generate a UUID4 string without dashes, making a cleaner file name + uuid_str: str = str(uuid.uuid4().hex) + + new_file_name: str = f"{file_name}_{uuid_str}{file_extension}" + + # e.g.
"directory/file_name_uuid.extension" + object_name: str = f"{self._BUCKET_DIRECTORY_NAME}/{new_file_name}" + + # upload file to AWS S3 + self._s3_client.upload_file(Filename=file_path, Bucket=self._BUCKET_NAME, Key=object_name) # type: ignore + + # return the object_name within AWS S3 for easy retrieval + return object_name + + @beartype + def download_file(self, file_source: str, destination_path: str = ".") -> None: + """ + Download a file from CRIPT Cloud Storage (AWS S3) and save it to the specified path. + + ??? Info "Cloud Storage vs Web URL File Download" + + If the `object_name` does not start with `http` then the program assumes the file is in AWS S3 storage, + and attempts to retrieve it via + [boto3 client](https://boto3.amazonaws.com/v1/documentation/api/latest/index.html). + + If the `object_name` starts with `http` then the program knows that + it is a file stored on the web. The program makes a simple + [GET](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/GET) request to get the file, + then writes its contents to the specified destination. + + > Note: The current version of the program is designed to download files from the web in a straightforward + manner. However, please be aware that the program may encounter limitations when dealing with URLs that + require JavaScript or a session to be enabled. In such cases, the download method may fail. + + > We acknowledge these limitations and plan to enhance the method in future versions to ensure compatibility + with a wider range of web file URLs. Our goal is to develop a robust solution capable of handling any and + all web file URLs. + + Parameters + ---------- + file_source: str + `object_name`: the file is downloaded from cloud storage via its object_name and saved to local storage, + e.g. `"Data/{file_name}"` + --- + `URL file source`: if the file source starts with `http` then it is downloaded via a `GET` request and + saved to local storage, + e.g. `https://criptscripts.org/cript_graph_json/JSON/cao_protein.json` + destination_path: str + path, including the file name, where the file should be saved on local storage + > If no path is specified, then by default the file is downloaded + to the current working directory. + + > The destination path must include a file name and file extension + e.g.: `~/Desktop/my_example_file_name.extension` + + Examples + -------- + ```python + from pathlib import Path + + desktop_path = (Path(__file__).parent / "cript_downloads" / "my_downloaded_file.txt").resolve() + cript_api.download_file(file_source=my_file_source, destination_path=desktop_path) + ``` + + Raises + ------ + FileNotFoundError + Raised when the file could not be found because it does not exist or the path given is incorrect + + Returns + ------- + None + Simply downloads the file + """ + + # if the file source is a URL + if file_source.startswith("http"): + download_file_from_url(url=file_source, destination_path=Path(destination_path).resolve()) + return + + # the file is stored in cloud storage and must be retrieved via object_name + self._s3_client.download_file(Bucket=self._BUCKET_NAME, Key=file_source, Filename=destination_path) # type: ignore + + @beartype + def search( + self, + node_type, + search_mode: SearchModes, + value_to_search: Union[None, str], + ) -> Paginator: + """ + This method is used to perform a search on the CRIPT platform.
+ + Examples + -------- + ```python + # search by node type + materials_paginator = cript_api.search( + node_type=cript.Material, + search_mode=cript.SearchModes.NODE_TYPE, + value_to_search=None, + ) + ``` + + Parameters + ---------- + node_type : UUIDBaseNode + Type of node that you are searching for. + search_mode : SearchModes + Type of search you want to do. You can search by name, `UUID`, `EXACT_NAME`, etc. + Refer to [valid search modes](../search_modes) + value_to_search : Union[str, None] + What you are searching for can be either a value, and if you are only searching for + a `NODE_TYPE`, then this value can be empty or `None` + + Returns + ------- + Paginator + paginator object for the user to use to flip through pages of search results + """ + + # get node typ from class + node_type = node_type.node_type_snake_case + + # always putting a page parameter of 0 for all search URLs + page_number = 0 + + api_endpoint: str = "" + + # requesting a page of some primary node + if search_mode == SearchModes.NODE_TYPE: + api_endpoint = f"{self._host}/{node_type}" + + elif search_mode == SearchModes.CONTAINS_NAME: + api_endpoint = f"{self._host}/search/{node_type}" + + elif search_mode == SearchModes.EXACT_NAME: + api_endpoint = f"{self._host}/search/exact/{node_type}" + + elif search_mode == SearchModes.UUID: + api_endpoint = f"{self._host}/{node_type}/{value_to_search}" + # putting the value_to_search in the URL instead of a query + value_to_search = None + + assert api_endpoint != "" + + # TODO error handling if none of the API endpoints got hit + return Paginator(http_headers=self._http_headers, api_endpoint=api_endpoint, query=value_to_search, current_page_number=page_number) diff --git a/src/cript/api/exceptions.py b/src/cript/api/exceptions.py new file mode 100644 index 000000000..0dd7062c0 --- /dev/null +++ b/src/cript/api/exceptions.py @@ -0,0 +1,209 @@ +from typing import List, Optional, Set + +from cript.exceptions import CRIPTException + + +class CRIPTConnectionError(CRIPTException): + """ + ## Definition + Raised when the cript.API object cannot connect to CRIPT with the given host and token + + ## How to Fix + The best way to fix this error is to check that your host and token are written and used correctly within + the cript.API object. This error could also be shown if the API is unresponsive and the cript.API object + just cannot successfully connect to it. + """ + + def __init__(self, host, token): + self.host = host + # Do not store full token in stack trace for security reasons + uncovered_chars = len(token) // 4 + self.token = token[:uncovered_chars] + self.token += "*" * (len(token) - 2 * uncovered_chars) + self.token += token[-uncovered_chars:] + + def __str__(self) -> str: + error_message = f"Could not connect to CRIPT with the given host ({self.host}) and token ({self.token}). " f"Please be sure both host and token are entered correctly." + + return error_message + + +# TODO refactor +class InvalidVocabulary(CRIPTException): + """ + Raised when the CRIPT controlled vocabulary is invalid + """ + + vocab: str = "" + possible_vocab: List[str] = [] + + def __init__(self, vocab: str, possible_vocab: List[str]) -> None: + self.vocab = vocab + self.possible_vocab = possible_vocab + + def __str__(self) -> str: + error_message = f"The vocabulary '{self.vocab}' entered does not exist within the CRIPT controlled vocabulary." 
f" Please pick a valid CRIPT vocabulary from {self.possible_vocab}" + return error_message + + +class InvalidVocabularyCategory(CRIPTException): + """ + Raised when the CRIPT controlled vocabulary category is unknown + and gives the user a list of all valid vocabulary categories + """ + + def __init__(self, vocab_category: str, valid_vocab_category: List[str]): + self.vocab_category = vocab_category + self.valid_vocab_category = valid_vocab_category + + def __str__(self) -> str: + error_message = f"The vocabulary category {self.vocab_category} does not exist within the CRIPT controlled vocabulary. " f"Please pick a valid CRIPT vocabulary category from {self.valid_vocab_category}." + + return error_message + + +class CRIPTAPIRequiredError(CRIPTException): + """ + ## Definition + Exception to be raised when the API object is requested, but no cript.API object exists yet. + + The CRIPT Python SDK relies on a cript.API object for creation, validation, and modification of nodes. + The cript.API object may be explicitly called by the user to perform operations to the API, or + implicitly called by the Python SDK under the hood to perform some sort of validation. + + ## How to Fix + To fix this error please instantiate an api object + + ```python + import cript + + my_host = "https://api.criptapp.org/" + my_token = "123456" # To use your token securely, please consider using environment variables + + my_api = cript.API(host=my_host, token=my_token) + ``` + """ + + def __init__(self): + pass + + def __str__(self) -> str: + error_message = "cript.API object is required for an operation, but it does not exist." "Please instantiate a cript.API object to continue." "See the documentation for more details." + + return error_message + + +class CRIPTAPISaveError(CRIPTException): + """ + ## Definition + CRIPTAPISaveError is raised when the API responds with a http status code that is anything other than 200. + The status code and API response is shown to the user to help them debug the issue. + + ## How to Fix + This error is more of a case by case basis, but the best way to approach it to understand that the + CRIPT Python SDK sent an HTTP POST request with a giant JSON in the request body + to the CRIPT API. The API then read that request, and it responded with some sort of error either + to the that JSON or how the request was sent. + """ + + api_host_domain: str + http_code: str + api_response: str + + def __init__(self, api_host_domain: str, http_code: str, api_response: str, patch_request: bool, pre_saved_nodes: Optional[Set[str]] = None, json_data: Optional[str] = None): + self.api_host_domain = api_host_domain + self.http_code = http_code + self.api_response = api_response + self.patch_request = patch_request + self.pre_saved_nodes = pre_saved_nodes + self.json_data = json_data + + def __str__(self) -> str: + type = "POST" + if self.patch_request: + type = "PATCH" + error_message = f"API responded to {type} with 'http:{self.http_code} {self.api_response}'" + if self.json_data: + error_message += f" data: {self.json_data}" + + return error_message + + +class InvalidHostError(CRIPTException): + """ + ## Definition + Exception is raised when the host given to the API is invalid + + ## How to Fix + This is a simple error to fix, simply put `http://` or preferably `https://` in front of your domain + when passing in the host to the cript.API class such as `https://api.criptapp.org/` + + Currently, the only web protocol that is supported with the CRIPT Python SDK is `HTTP`. 
+ + ### Example + ```python + import cript + + my_valid_host = "https://api.criptapp.org/" + my_token = "123456" # To use your token securely, please consider using environment variables + + my_api = cript.API(host=my_valid_host, token=my_token) + ``` + + Warnings + -------- + Please consider always using [HTTPS](https://developer.mozilla.org/en-US/docs/Glossary/HTTPS) + as that is a secure protocol and avoid using `HTTP` as it is insecure. + The CRIPT Python SDK will give a warning in the terminal when it detects a host with `HTTP` + + + """ + + def __init__(self) -> None: + pass + + def __str__(self) -> str: + return "The host must start with http or https" + + +class APIError(CRIPTException): + """ + ## Definition + This is a generic error made to display API errors to the user to troubleshoot. + + ## How to Fix + Please keep in mind that the CRIPT Python SDK turns the [Project](../../nodes/primary_nodes/project) + node into a giant JSON and sends that to the API to be processed. If there are any errors while processing + the giant JSON generated by the CRIPT Python SDK, then the API will return an error about the http request + and the JSON sent to it. Therefore, the error shown might be an error within the JSON and not particular + within the Python code that was created + + The best way to trouble shoot this is to figure out what the API error means and figure out where + in the Python SDK this error occurred and what have been the reason under the hood. + """ + + api_error: str = "" + + def __init__(self, api_error: str) -> None: + self.api_error = api_error + + def __str__(self) -> str: + error_message: str = f"The API responded with {self.api_error}" + + return error_message + + +class FileDownloadError(CRIPTException): + """ + ## Definition + This error is raised when the API wants to download a file from an AWS S3 URL + via the `cript.API.download_file()` method, but the status is something other than 200. + """ + + error_message: str = "" + + def __init__(self, error_message: str) -> None: + self.error_message = error_message + + def __str__(self) -> str: + return self.error_message diff --git a/src/cript/api/paginator.py b/src/cript/api/paginator.py new file mode 100644 index 000000000..50e7ab601 --- /dev/null +++ b/src/cript/api/paginator.py @@ -0,0 +1,221 @@ +from typing import List, Optional, Union +from urllib.parse import quote + +import requests +from beartype import beartype + + +class Paginator: + """ + Paginator is used to flip through different pages of data that the API returns when searching. + > Instead of the user manipulating the URL and parameters, this object handles all of that for them. + + When conducting any kind of search the API returns pages of data and each page contains 10 results. + This is equivalent to conducting a Google search when Google returns a limited number of links on the first page + and all other results are on the next pages. + + Using the Paginator object, the user can simply and easily flip through the pages of data the API provides. + + !!! Warning "Do not create paginator objects" + Please note that you are not required or advised to create a paginator object, and instead the + Python SDK API object will create a paginator for you, return it, and let you simply use it + + + Attributes + ---------- + current_page_results: List[dict] + List of JSON dictionary results returned from the API + ```python + [{result 1}, {result 2}, {result 3}, ...] 
+ ``` + """ + + _http_headers: dict + + api_endpoint: str + + # if query or page number are None, then it means that api_endpoint does not allow for whatever that is None + # and that is not added to the URL + # by default the page_number and query are `None` and they can get filled in + query: Union[str, None] + _current_page_number: int + + current_page_results: List[dict] + + @beartype + def __init__( + self, + http_headers: dict, + api_endpoint: str, + query: Optional[str] = None, + current_page_number: int = 0, + ): + """ + create a paginator + + 1. set all the variables coming into constructor + 1. then prepare any variable as needed e.g. strip extra spaces or url encode query + + Parameters + ---------- + http_headers: dict + get already created http headers from API and just use them in paginator + api_endpoint: str + api endpoint to send the search requests to + it already contains what node the user is looking for + current_page_number: int + page number to start from. Keep track of current page for user to flip back and forth between pages of data + query: str + the value the user is searching for + + Returns + ------- + None + instantiate a paginator + """ + self._http_headers = http_headers + self.api_endpoint = api_endpoint + self.query = query + self._current_page_number = current_page_number + + # check if it is a string and not None to avoid AttributeError + if api_endpoint is not None: + # strip the ending slash "/" to make URL uniform and any trailing spaces from either side + self.api_endpoint = api_endpoint.rstrip("/").strip() + + # check if it is a string and not None to avoid AttributeError + if query is not None: + # URL encode query + self.query = quote(query) + + self.fetch_page_from_api() + + def next_page(self): + """ + flip to the next page of data. + + Examples + -------- + ```python + my_paginator.next_page() + ``` + """ + self.current_page_number += 1 + + def previous_page(self): + """ + flip to the next page of data. + + Examples + -------- + ```python + my_paginator.previous_page() + ``` + """ + self.current_page_number -= 1 + + @property + @beartype + def current_page_number(self) -> int: + """ + get the current page number that you are on. 
+ + Setting the page will take you to that specific page of results + + Examples + -------- + ```python + my_paginator.current_page = 10 + ``` + + Returns + ------- + current page number: int + the current page number of the data + """ + return self._current_page_number + + @current_page_number.setter + @beartype + def current_page_number(self, new_page_number: int) -> None: + """ + flips to a specific page of data that has been requested + + sets the current_page_number and then sends the request to the API and gets the results of this page number + + Parameters + ---------- + new_page_number (int): specific page of data that the user wants to go to + + Examples + -------- + requests.get("https://api.criptapp.org//api?page=2) + requests.get(f"{self.query}?page={self.current_page_number - 1}") + + Raises + -------- + InvalidPageRequest, in case the user tries to get a negative page or a page that doesn't exist + """ + if new_page_number < 0: + error_message: str = f"Paginator current page number is invalid because it is negative: " f"{self.current_page_number} please set paginator.current_page_number " f"to a positive page number" + + # TODO replace with custom error + raise Exception(error_message) + + else: + self._current_page_number = new_page_number + # when new page number is set, it is then fetched from the API + self.fetch_page_from_api() + + @beartype + def fetch_page_from_api(self) -> List[dict]: + """ + 1. builds the URL from the query and page number + 1. makes the request to the API + 1. API responds with a JSON that has data or JSON that has data and result + 1. parses it and correctly sets the current_page_results property + + Raises + ------ + InvalidSearchRequest + In case the API responds with an error + + Returns + ------- + current page results: List[dict] + makes a request to the API and gets a page of data + """ + + # temporary variable to not overwrite api_endpoint + temp_api_endpoint: str = self.api_endpoint + + if self.query is not None: + temp_api_endpoint = f"{temp_api_endpoint}/?q={self.query}" + + elif self.query is None: + temp_api_endpoint = f"{temp_api_endpoint}/?q=" + + temp_api_endpoint = f"{temp_api_endpoint}&page={self.current_page_number}" + + response = requests.get( + url=temp_api_endpoint, + headers=self._http_headers, + ).json() + + # handling both cases in case there is result inside of data or just data + try: + self.current_page_results = response["data"]["result"] + except KeyError: + self.current_page_results = response["data"] + except TypeError: + self.current_page_results = response["data"] + + if response["code"] == 404 and response["error"] == "The requested URL was not found on the server. 
If you entered the URL manually please check your spelling and try again.": + self.current_page_results = [] + return self.current_page_results + + # TODO give a CRIPT error if HTTP response is anything other than 200 + if response["code"] != 200: + raise Exception(f"API responded with: {response['error']}") + + return self.current_page_results diff --git a/src/cript/api/utils/__init__.py b/src/cript/api/utils/__init__.py new file mode 100644 index 000000000..50e0528bb --- /dev/null +++ b/src/cript/api/utils/__init__.py @@ -0,0 +1,4 @@ +# trunk-ignore-all(ruff/F401) + +from .get_host_token import resolve_host_and_token +from .helper_functions import _get_node_type_from_json diff --git a/src/cript/api/utils/get_host_token.py b/src/cript/api/utils/get_host_token.py new file mode 100644 index 000000000..9d36ff550 --- /dev/null +++ b/src/cript/api/utils/get_host_token.py @@ -0,0 +1,62 @@ +import json +import os +from pathlib import Path +from typing import Dict + + +def resolve_host_and_token(host, api_token, storage_token, config_file_path) -> Dict[str, str]: + """ + resolves the host and token after passed into the constructor if it comes from env vars or config file + + ## priority level + 1. config file + 1. environment variable + 1. direct host and token + + Returns + ------- + Dict[str, str] + dict of host and token + """ + if config_file_path: + # convert str path or path object + config_file_path = Path(config_file_path).resolve() + + # TODO the reading from config file can be separated into another function + # read host and token from config.json + with open(config_file_path, "r") as file_handle: + config_file: Dict[str, str] = json.loads(file_handle.read()) + # set api host and token + host = config_file["host"] + api_token = config_file["api_token"] + storage_token = config_file["storage_token"] + + return {"host": host, "api_token": api_token, "storage_token": storage_token} + + # if host and token is none then it will grab host and token from user's environment variables + if host is None: + host = _read_env_var(env_var_name="CRIPT_HOST") + + if api_token is None: + api_token = _read_env_var(env_var_name="CRIPT_TOKEN") + + if storage_token is None: + storage_token = _read_env_var(env_var_name="CRIPT_STORAGE_TOKEN") + + return {"host": host, "api_token": api_token, "storage_token": storage_token} + + +def _read_env_var(env_var_name: str) -> str: + """ + reads the host or token from the env vars called `CRIPT_HOST` or `CRIPT_TOKEN` + + Returns + ------- + str + """ + env_var = os.environ.get(env_var_name) + + if env_var is None: + raise RuntimeError(f"API initialized with `host=None` and `token=None` but environment variable `{env_var_name}` " f"was not found.") + + return env_var diff --git a/src/cript/api/utils/helper_functions.py b/src/cript/api/utils/helper_functions.py new file mode 100644 index 000000000..862421bb8 --- /dev/null +++ b/src/cript/api/utils/helper_functions.py @@ -0,0 +1,43 @@ +import json +from typing import Dict, List, Union + +from cript.nodes.exceptions import CRIPTJsonNodeError +from cript.nodes.util import _is_node_field_valid + + +def _get_node_type_from_json(node_json: Union[Dict, str]) -> str: + """ + takes a node JSON and output the node_type `Project`, `Material`, etc. + + 1. convert node JSON dict or str to dict + 1. do check the node list to be sure it only has a single type in it + 1. 
get the node type and return it + + Parameters + ---------- + node_json: [Dict, str] + + Notes + ----- + Takes a str or dict to be more versatile + + Returns + ------- + str: + node type + """ + # convert all JSON node strings to dict for easier handling + if isinstance(node_json, str): + node_json = json.loads(node_json) + try: + node_type_list: List[str] = node_json["node"] # type: ignore + except KeyError: + raise CRIPTJsonNodeError(node_list=node_json["node"], json_str=json.dumps(node_json)) # type: ignore + + # check to be sure the node list has a single type "node": ["Material"] + if _is_node_field_valid(node_type_list=node_type_list): + return node_type_list[0] + + # if invalid then raise error + else: + raise CRIPTJsonNodeError(node_list=node_type_list, json_str=str(node_json)) diff --git a/src/cript/api/utils/save_helper.py b/src/cript/api/utils/save_helper.py new file mode 100644 index 000000000..4ef2c2bba --- /dev/null +++ b/src/cript/api/utils/save_helper.py @@ -0,0 +1,164 @@ +import json +import re +import uuid +from dataclasses import dataclass, field +from typing import Dict, Set + + +@dataclass +class _InternalSaveValues: + """ + Class that carries attributes to be carried through recursive calls of _internal_save. + """ + + saved_uuid: Set[str] = field(default_factory=set) + suppress_attributes: Dict[str, Set[str]] = field(default_factory=dict) + + def __add__(self, other: "_InternalSaveValues") -> "_InternalSaveValues": + """ + Implement a short hand to combine two of these save values, with `+`. + This unions, the `saved_uuid`. + And safely unions `suppress_attributes` too. + """ + # Make a manual copy of `self`. + return_value = _InternalSaveValues(self.saved_uuid.union(other.saved_uuid), self.suppress_attributes) + + # Union the dictionary. + for uuid_str in other.suppress_attributes: + try: + # If the uuid exists in both `suppress_attributes` union the value sets + return_value.suppress_attributes[uuid_str] = return_value.suppress_attributes[uuid_str].union(other.suppress_attributes[uuid_str]) + except KeyError: + # If it only exists in one, just copy the set into the new one. + return_value.suppress_attributes[uuid_str] = other.suppress_attributes[uuid_str] + return return_value + + def __gt__(self, other): + """ + A greater comparison to see if something was added to the info. + """ + if len(self.saved_uuid) > len(other.saved_uuid): + return True + if len(self.suppress_attributes) > len(other.suppress_attributes): + return True + # If the two dicts have the same key, make sure at least one key has more suppressed attributes + if self.suppress_attributes.keys() == other.suppress_attributes.keys(): + longer_set_found = False + for key in other.suppress_attributes: + if len(self.suppress_attributes[key]) < len(other.suppress_attributes[key]): + return False + if self.suppress_attributes[key] > other.suppress_attributes[key]: + longer_set_found = True + return longer_set_found + return False + + +def _fix_node_save(api, node, response, save_values: _InternalSaveValues) -> _InternalSaveValues: + """ + Helper function, that attempts to fix a bad node. + And if it is fixable, we resave the entire node. + + Returns set of known uuids, if fixable, otherwise False. 
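The backend error strings this helper reacts to look roughly like the following (a sketch reconstructed from the parsing below; the exact backend wording and the UUID are placeholders):

```python
# "Bad uuid"/"Duplicate uuid" errors trigger a pre-save of the referenced node,
# while "duplicate item" errors trigger attribute suppression for the offending children
example_error_messages = [
    "Bad uuid: 123e4567-e89b-12d3-a456-426614174000 provided",
    "duplicate item {'name': 'my material'}",
]
```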
+ """ + if response["code"] not in (400, 409): + raise RuntimeError(f"The internal helper function `_fix_node_save` has been called for an error that is not yet implemented to be handled {response}.") + + if response["error"].startswith("Bad uuid:") or response["error"].strip().startswith("Duplicate uuid:"): + missing_uuid = _get_uuid_from_error_message(response["error"]) + missing_node = find_node_by_uuid(node, missing_uuid) + # If the missing node, is the same as the one we are trying to save, this not working. + # We end the infinite loop here. + if missing_uuid == str(node.uuid): + return save_values + # Now we save the bad node extra. + # So it will be known when we attempt to save the graph again. + # Since we pre-saved this node, we want it to be UUID edge only the next JSON. + # So we add it to the list of known nodes + returned_save_values = api._internal_save(missing_node, save_values) + save_values += returned_save_values + # The missing node, is now known to the API + save_values.saved_uuid.add(missing_uuid) + + # Handle all duplicate items warnings if possible + if response["error"].startswith("duplicate item"): + for search_dict_str in re.findall(r"\{(.*?)\}", response["error"]): # Regular expression finds all text elements enclosed in `{}`. In the error message this is the dictionary describing the duplicated item. + # The error message contains a description of the offending elements. + search_dict_str = "{" + search_dict_str + "}" + search_dict_str = search_dict_str.replace("'", '"') + search_dict = json.loads(search_dict_str) + # These are in the exact format to use with `find_children` so we find all the offending children. + all_duplicate_nodes = node.find_children(search_dict) + for duplicate_node in all_duplicate_nodes: + # Unfortunately, even patch errors if you patch with an offending element. + # So we remove the offending element from the JSON + # TODO IF THIS IS A TRUE DUPLICATE NAME ERROR, IT WILL ERROR AS THE NAME ATTRIBUTE IS MISSING. + try: + # the search_dict convenient list all the attributes that are offending in the keys. + # So if we haven't listed the current node in the suppress attribute dict, we add the node with the offending attributes to suppress. + save_values.suppress_attributes[str(duplicate_node.uuid)] = set(search_dict.keys()) + except KeyError: + # If we have the current node in the dict, we just add the new elements to the list of suppressed attributes for it. + save_values.suppress_attributes[str(duplicate_node.uuid)].add(set(search_dict.keys())) # type: ignore + + # Attempts to save the duplicate items element. + save_values += api._internal_save(duplicate_node, save_values) + # After the save, we can reduce it to just a UUID edge in the graph (avoiding the duplicate issues). 
+ save_values.saved_uuid.add(str(duplicate_node.uuid)) + + return save_values + + +def _get_uuid_from_error_message(error_message: str) -> str: + """ + takes an CRIPTAPISaveError and tries to get the UUID that the API is having trouble with + and return that + + Parameters + ---------- + error_message: str + + Returns + ------- + UUID + the UUID the API had trouble with + """ + bad_uuid = None + if error_message.startswith("Bad uuid: "): + bad_uuid = error_message[len("Bad uuid: ") : -len(" provided")].strip() + if error_message.strip().startswith("Duplicate uuid:"): + bad_uuid = error_message[len(" Duplicate uuid:") : -len("provided")].strip() + if bad_uuid is None or len(bad_uuid) != len(str(uuid.uuid4())): # Ensure we found a full UUID describing string (here tested against a random new uuid length.) + raise RuntimeError(f"The internal helper function `_get_uuid_from_error_message` has been called for an error message that is not yet implemented to be handled. error message {error_message}, found uuid {bad_uuid}.") + + return bad_uuid + + +def find_node_by_uuid(node, uuid_str: str): + # Use the find_children functionality to find that node in our current tree + # We can have multiple occurrences of the node, + # but it doesn't matter which one we save + # TODO some error handling, for the BUG case of not finding the UUID + missing_node = node.find_children({"uuid": uuid_str})[0] + + return missing_node + + +def _identify_suppress_attributes(node, response: Dict) -> Dict[str, Set[str]]: + suppress_attributes: Dict[str, Set[str]] = {} + if response["error"].startswith("Additional properties are not allowed"): + # Find all the attributes, that are listed in the error message with regex + attributes = set(re.findall(r"'(.*?)'", response["error"])) # regex finds all attributes in enclosing `'`. This is how the error message lists them. + + # At the end of the error message the offending path is given. + # The structure of the error message is such, that is is after `path:`, so we find and strip the path out of the message. + path = response["error"][response["error"].rfind("path:") + len("path:") :].strip() + + if path != "/": + # TODO find the UUID this belongs to + raise RuntimeError("Fixing non-root objects for patch, not implemented yet. This is a bug, please report it on https://github.com/C-Accel-CRIPT/Python-SDK/ .") + + try: + suppress_attributes[str(node.uuid)].add(attributes) # type: ignore + except KeyError: + suppress_attributes[str(node.uuid)] = attributes + return suppress_attributes diff --git a/src/cript/api/utils/web_file_downloader.py b/src/cript/api/utils/web_file_downloader.py new file mode 100644 index 000000000..10b7f13fd --- /dev/null +++ b/src/cript/api/utils/web_file_downloader.py @@ -0,0 +1,97 @@ +import os +from pathlib import Path +from typing import Union + +import requests + + +def download_file_from_url(url: str, destination_path: Union[str, Path]) -> None: + """ + downloads a file from URL + + Warnings + --------- + This is a very basic implementation that does not handle all URL files, + and will likely throw errors. + For example, some file URLs require a session or JS enabled to navigate to them + such as "https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf" + in those cases this implementation will fail. 
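A minimal usage sketch (the URL is the same example used below; note that the file extension is taken from the URL and appended to the destination automatically):

```python
from pathlib import Path

from cript.api.utils.web_file_downloader import download_file_from_url

# ".pptx" is appended to the destination because it is extracted from the URL
download_file_from_url(
    url="https://criptscripts.org/cript_graph/graph_ppt/CRIPT_Data_Structure_Template.pptx",
    destination_path=Path("./CRIPT_Data_Structure_Template"),
)
```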
+ + Parameters + ---------- + url: str + web URL to download the file from + example: https://criptscripts.org/cript_graph/graph_ppt/CRIPT_Data_Structure_Template.pptx + destination_path: Union[str, Path] + which directory and file name the file should be written to after gotten from the web + + Returns + ------- + None + just downloads the file + """ + + response = requests.get(url=url) + + # if not HTTP 200, then throw error + response.raise_for_status() + + # get extension from URL + file_extension = get_file_extension_from_url(url=url) + + # add the file extension to file path and file name + destination_path = str(destination_path) + file_extension + + destination_path = Path(destination_path) + + # write contents to a file on user disk + write_file_to_disk(destination_path=destination_path, file_contents=response.content) + + +def get_file_extension_from_url(url: str) -> str: + """ + takes a file url and returns only the extension with the dot + + Parameters + ---------- + url: str + web URL + example: "https://criptscripts.org/cript_graph/graph_ppt/CRIPT_Data_Structure_Template.pptx" + + Returns + ------- + file extension: str + file extension with dot + example: ".pptx" + """ + file_extension = os.path.splitext(url)[1] + + return file_extension + + +def write_file_to_disk(destination_path: Union[str, Path], file_contents: bytes) -> None: + """ + simply writes the file to the given destination + + Parameters + ---------- + destination_path: Union[str, Path] + which directory and file name the file should be written to after gotten from the web + file_contents: bytes + content of file to write to disk + + Returns + ------- + None + just writes the file to disk + + Raises + ------ + FileNotFoundError + In case the destination given to write the file to was not found or does not exist + """ + # convert any type of path to a Path object + destination_path = Path(destination_path) + + with open(file=destination_path, mode="wb") as file_handle: + file_handle.write(file_contents) diff --git a/src/cript/api/valid_search_modes.py b/src/cript/api/valid_search_modes.py new file mode 100644 index 000000000..7d168450f --- /dev/null +++ b/src/cript/api/valid_search_modes.py @@ -0,0 +1,35 @@ +from enum import Enum + + +class SearchModes(Enum): + """ + Available search modes to use with the CRIPT API search + + Attributes + ---------- + NODE_TYPE : str + Search by node type. + EXACT_NAME : str + Search by exact node name. + CONTAINS_NAME : str + Search by node name containing a given string. + UUID : str + Search by node UUID. + + Examples + ------- + ```python + # search by node type + materials_paginator = cript_api.search( + node_type=cript.Material, + search_mode=cript.SearchModes.NODE_TYPE, + value_to_search=None, + ) + ``` + """ + + NODE_TYPE: str = "" + EXACT_NAME: str = "exact_name" + CONTAINS_NAME: str = "contains_name" + UUID: str = "uuid" + # UUID_CHILDREN = "uuid_children" diff --git a/src/cript/api/vocabulary_categories.py b/src/cript/api/vocabulary_categories.py new file mode 100644 index 000000000..c1969236e --- /dev/null +++ b/src/cript/api/vocabulary_categories.py @@ -0,0 +1,99 @@ +from enum import Enum + + +class VocabCategories(Enum): + """ + All available [CRIPT controlled vocabulary categories](https://app.criptapp.org/vocab/) + + Controlled vocabulary categories are used to classify data. + + Attributes + ---------- + ALGORITHM_KEY: str + Algorithm key. + ALGORITHM_TYPE: str + Algorithm type. + BUILDING_BLOCK: str + Building block. + CITATION_TYPE: str + Citation type. 
+ COMPUTATION_TYPE: str + Computation type. + COMPUTATIONAL_FORCEFIELD_KEY: str + Computational forcefield key. + COMPUTATIONAL_PROCESS_PROPERTY_KEY: str + Computational process property key. + COMPUTATIONAL_PROCESS_TYPE: str + Computational process type. + CONDITION_KEY: str + Condition key. + DATA_LICENSE: str + Data license. + DATA_TYPE: str + Data type. + EQUIPMENT_KEY: str + Equipment key. + FILE_TYPE: str + File type. + INGREDIENT_KEYWORD: str + Ingredient keyword. + MATERIAL_IDENTIFIER_KEY: str + Material identifier key. + MATERIAL_KEYWORD: str + Material keyword. + MATERIAL_PROPERTY_KEY: str + Material property key. + PARAMETER_KEY: str + Parameter key. + PROCESS_KEYWORD: str + Process keyword. + PROCESS_PROPERTY_KEY: str + Process property key. + PROCESS_TYPE: str + Process type. + PROPERTY_METHOD: str + Property method. + QUANTITY_KEY: str + Quantity key. + REFERENCE_TYPE: str + Reference type. + SET_TYPE: str + Set type. + UNCERTAINTY_TYPE: str + Uncertainty type. + + Examples + -------- + ```python + algorithm_vocabulary = api.get_vocabulary_by_category( + cript.VocabCategories.ALGORITHM_KEY + ) + ``` + """ + + ALGORITHM_KEY: str = "algorithm_key" + ALGORITHM_TYPE: str = "algorithm_type" + BUILDING_BLOCK: str = "building_block" + CITATION_TYPE: str = "citation_type" + COMPUTATION_TYPE: str = "computation_type" + COMPUTATIONAL_FORCEFIELD_KEY: str = "computational_forcefield_key" + COMPUTATIONAL_PROCESS_PROPERTY_KEY: str = "computational_process_property_key" + COMPUTATIONAL_PROCESS_TYPE: str = "computational_process_type" + CONDITION_KEY: str = "condition_key" + DATA_LICENSE: str = "data_license" + DATA_TYPE: str = "data_type" + EQUIPMENT_KEY: str = "equipment_key" + FILE_TYPE: str = "file_type" + INGREDIENT_KEYWORD: str = "ingredient_keyword" + MATERIAL_IDENTIFIER_KEY: str = "material_identifier_key" + MATERIAL_KEYWORD: str = "material_keyword" + MATERIAL_PROPERTY_KEY: str = "material_property_key" + PARAMETER_KEY: str = "parameter_key" + PROCESS_KEYWORD: str = "process_keyword" + PROCESS_PROPERTY_KEY: str = "process_property_key" + PROCESS_TYPE: str = "process_type" + PROPERTY_METHOD: str = "property_method" + QUANTITY_KEY: str = "quantity_key" + REFERENCE_TYPE: str = "reference_type" + SET_TYPE: str = "set_type" + UNCERTAINTY_TYPE: str = "uncertainty_type" diff --git a/src/cript/exceptions.py b/src/cript/exceptions.py new file mode 100644 index 000000000..3891bf646 --- /dev/null +++ b/src/cript/exceptions.py @@ -0,0 +1,12 @@ +from abc import abstractmethod + + +class CRIPTException(Exception): + """ + Parent CRIPT exception. + All CRIPT exception inherit this class. 
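Because every SDK-specific error derives from this class, it can serve as a single catch-all in user scripts (a minimal sketch; `api.save(project)` stands in for any SDK call):

```python
from cript.exceptions import CRIPTException

try:
    api.save(project)
except CRIPTException as error:
    # every concrete CRIPT exception implements __str__ with a descriptive message
    print(error)
```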
+ """ + + @abstractmethod + def __str__(self) -> str: + pass diff --git a/src/cript/nodes/__init__.py b/src/cript/nodes/__init__.py new file mode 100644 index 000000000..4c5dfe051 --- /dev/null +++ b/src/cript/nodes/__init__.py @@ -0,0 +1,32 @@ +# trunk-ignore-all(ruff/F401) +from cript.nodes.primary_nodes import ( + Collection, + Computation, + ComputationProcess, + Data, + Experiment, + Inventory, + Material, + Process, + Project, + Reference, +) +from cript.nodes.subobjects import ( + Algorithm, + Citation, + ComputationalForcefield, + Condition, + Equipment, + Ingredient, + Parameter, + Property, + Quantity, + Software, + SoftwareConfiguration, +) +from cript.nodes.supporting_nodes import File, User +from cript.nodes.util import ( + NodeEncoder, + add_orphaned_nodes_to_project, + load_nodes_from_json, +) diff --git a/src/cript/nodes/core.py b/src/cript/nodes/core.py new file mode 100644 index 000000000..0eb0d4f0d --- /dev/null +++ b/src/cript/nodes/core.py @@ -0,0 +1,456 @@ +import copy +import dataclasses +import json +import re +import uuid +from abc import ABC +from dataclasses import asdict, dataclass, replace +from typing import Dict, List, Optional, Set + +from cript.nodes.exceptions import ( + CRIPTAttributeModificationError, + CRIPTExtraJsonAttributes, + CRIPTJsonSerializationError, +) + +tolerated_extra_json = [] + + +def add_tolerated_extra_json(additional_tolerated_json: str): + """ + In case a node should be loaded from JSON (such as `getting` them from the API), + but the API sends additional JSON attributes, these can be set to tolerated temporarily with this routine. + """ + tolerated_extra_json.append(additional_tolerated_json) + + +def get_new_uid(): + return "_:" + str(uuid.uuid4()) + + +class classproperty(object): + def __init__(self, f): + self.f = f + + def __get__(self, obj, owner): + if obj is None: + return self.f(owner) + return self.f(obj) + + +class BaseNode(ABC): + """ + This abstract class is the base of all CRIPT nodes. + It offers access to a json attribute class, + which reflects the data model JSON attributes. + Also, some basic shared functionality is provided by this base class. + """ + + @dataclass(frozen=True) + class JsonAttributes: + node: List[str] = dataclasses.field(default_factory=list) + uid: str = "" + + _json_attrs: JsonAttributes = JsonAttributes() + + @classproperty + def node_type(self): + name = type(self).__name__ + if name == "ABCMeta": + name = self.__name__ + return name + + @classproperty + def node_type_snake_case(self): + camel_case = self.node_type + # Regex to convert camel case to snake case. + snake_case = re.sub(r"(? str: + """ + Return a string representation of a node data model attributes. + + Returns + ------- + str + A string representation of the node. + """ + return str(asdict(self._json_attrs)) + + @property + def uid(self): + return self._json_attrs.uid + + @property + def node(self): + return self._json_attrs.node + + def _update_json_attrs_if_valid(self, new_json_attr: JsonAttributes) -> None: + """ + tries to update the node if valid and then checks if it is valid or not + + 1. updates the node with the new information + 1. run db schema validation on it + 1. if db schema validation succeeds then update and continue + 1. 
else: raise an error and tell the user what went wrong + + Parameters + ---------- + new_json_attr + + Raises + ------ + Exception + + Returns + ------- + None + """ + old_json_attrs = self._json_attrs + self._json_attrs = new_json_attr + + try: + self.validate() + except Exception as exc: + self._json_attrs = old_json_attrs + raise exc + + def validate(self, api=None, is_patch=False) -> None: + """ + Validate this node (and all its children) against the schema provided by the data bank. + + Raises: + ------- + Exception with more error information. + """ + from cript.api.api import _get_global_cached_api + + if api is None: + api = _get_global_cached_api() + api._is_node_schema_valid(self.get_json(is_patch=is_patch).json, is_patch=is_patch) + + @classmethod + def _from_json(cls, json_dict: dict): + # TODO find a way to handle uuid nodes only + + # Child nodes can inherit and overwrite this. + # They should call super()._from_json first, and modified the returned object after if necessary + # We create manually a dict that contains all elements from the send dict. + # That eliminates additional fields and doesn't require asdict. + arguments = {} + default_dataclass = cls.JsonAttributes() + for field in json_dict: + try: + getattr(default_dataclass, field) + except AttributeError: + pass + else: + arguments[field] = json_dict[field] + + # add omitted fields from default (necessary if they are required) + for field_name in [field.name for field in dataclasses.fields(default_dataclass)]: + if field_name not in arguments: + arguments[field_name] = getattr(default_dataclass, field_name) + + # If a node with this UUID already exists, we don't create a new node. + # Instead we use the existing node from the cache and just update it. + from cript.nodes.uuid_base import UUIDBaseNode + + if "uuid" in json_dict and json_dict["uuid"] in UUIDBaseNode._uuid_cache: + node = UUIDBaseNode._uuid_cache[json_dict["uuid"]] + else: # Create a new node + try: + node = cls(**arguments) + # TODO we should not catch all exceptions if we are handling them, and instead let it fail + # to create a good error message that points to the correct place that it failed to make debugging easier + except Exception as exc: + print(cls, arguments) + raise exc + + attrs = cls.JsonAttributes(**arguments) + + # Handle default attributes manually. + for field in attrs.__dict__: + # Conserve newly assigned uid if uid is default (empty) + if getattr(attrs, field) == getattr(default_dataclass, field): + attrs = replace(attrs, **{str(field): getattr(node, field)}) + + try: # TODO remove this temporary solution + if not attrs.uid.startswith("_:"): + attrs = replace(attrs, uid="_:" + attrs.uid) + except AttributeError: + pass + # But here we force even usually unwritable fields to be set. + node._update_json_attrs_if_valid(attrs) + + return node + + def __deepcopy__(self, memo): + # Ideally I would call `asdict`, but that is not allowed inside a deepcopy chain. + # Making a manual transform into a dictionary here. + arguments = {} + for field in self.JsonAttributes().__dataclass_fields__: + arguments[field] = copy.deepcopy(getattr(self._json_attrs, field), memo) + # TODO URL handling + + # Since we excluded 'uuid' from arguments, + # a new uid will prompt the creation of a new matching uuid. + uid = get_new_uid() + arguments["uid"] = uid + + # Create node and init constructor attributes + node = self.__class__(**arguments) + # Update none constructor writable attributes. 
+ node._update_json_attrs_if_valid(self.JsonAttributes(**arguments)) + return node + + @property + def json(self): + """ + Property to obtain a simple json string. + Calls `get_json` with default arguments. + """ + # We cannot validate in `get_json` because we call it inside `validate`. + # But most uses are probably the property, so we can validate the node here. + self.validate() + return self.get_json().json + + def get_json( + self, + handled_ids: Optional[Set[str]] = None, + known_uuid: Optional[Set[str]] = None, + suppress_attributes: Optional[Dict[str, Set[str]]] = None, + is_patch: bool = False, + condense_to_uuid: Dict[str, Set[str]] = { + "Material": {"parent_material", "component"}, + "Experiment": {"data"}, + "Inventory": {"material"}, + "Ingredient": {"material"}, + "Property": {"component"}, + "ComputationProcess": {"material"}, + "Data": {"material"}, + "Process": {"product", "waste"}, + "Project": {"member", "admin"}, + "Collection": {"member", "admin"}, + }, + **kwargs + ): + """ + User facing access to get the JSON of a node. + Opposed to the also available property json this functions allows further control. + Additionally, this function does not call `self.validate()` but the property `json` does. + + Returns named tuple with json and handled ids as result. + """ + + @dataclass(frozen=True) + class ReturnTuple: + json: str + handled_ids: set + + # Do not check for circular references, since we handle them manually + kwargs["check_circular"] = kwargs.get("check_circular", False) + + # Delayed import to avoid circular imports + from cript.nodes.util import NodeEncoder + + if handled_ids is None: + handled_ids = set() + previous_handled_nodes = copy.deepcopy(NodeEncoder.handled_ids) + NodeEncoder.handled_ids = handled_ids + + # Similar to uid, we handle pre-saved known uuid such that they are UUID edges only + if known_uuid is None: + known_uuid = set() + previous_known_uuid = copy.deepcopy(NodeEncoder.known_uuid) + NodeEncoder.known_uuid = known_uuid + previous_suppress_attributes = copy.deepcopy(NodeEncoder.suppress_attributes) + NodeEncoder.suppress_attributes = suppress_attributes + previous_condense_to_uuid = copy.deepcopy(NodeEncoder.condense_to_uuid) + NodeEncoder.condense_to_uuid = condense_to_uuid + + try: + return ReturnTuple(json.dumps(self, cls=NodeEncoder, **kwargs), NodeEncoder.handled_ids) + except Exception as exc: + # TODO this handling that doesn't tell the user what happened and how they can fix it + # this just tells the user that something is wrong + # this should be improved to tell the user what went wrong and where + raise CRIPTJsonSerializationError(str(type(self)), str(self._json_attrs)) from exc + finally: + NodeEncoder.handled_ids = previous_handled_nodes + NodeEncoder.known_uuid = previous_known_uuid + NodeEncoder.suppress_attributes = previous_suppress_attributes + NodeEncoder.condense_to_uuid = previous_condense_to_uuid + + def find_children(self, search_attr: dict, search_depth: int = -1, handled_nodes=None) -> List: + """ + Finds all the children in a given tree of nodes (specified by its root), + that match the criteria of search_attr. + If a node is present multiple times in the graph, it is only once in the search results. + + search_dept: Max depth of the search into the tree. Helpful if circles are expected. -1 specifies no limit + + search_attr: dict + Dictionary that specifies which JSON attributes have to be present in a given node. 
+ If an attribute is a list, it it is sufficient if the specified attributes are in the list, + if others are present too, that does not exclude the child. + + Example: search_attr = `{"node": ["Parameter"]}` finds all "Parameter" nodes. + search_attr = `{"node": ["Algorithm"], "parameter": {"name" : "update_frequency"}}` + finds all "Algorithm" nodes, that have a parameter "update_frequency". + Since parameter is a list an alternative notation is + ``{"node": ["Algorithm"], "parameter": [{"name" : "update_frequency"}]}` + and Algorithms are not excluded they have more parameters. + search_attr = `{"node": ["Algorithm"], "parameter": [{"name" : "update_frequency"}, + {"name" : "cutoff_distance"}]}` + finds all algorithms that have a parameter "update_frequency" and "cutoff_distance". + + """ + + def is_attr_present(node: BaseNode, key, value): + """ + Helper function that checks if an attribute is present in a node. + """ + try: + attr_key = getattr(node._json_attrs, key) + except AttributeError: + return False + + # To save code paths, I convert non-lists into lists with one element. + if not isinstance(attr_key, list): + attr_key = [attr_key] + if not isinstance(value, list): + value = [value] + + # The definition of search is, that all values in a list have to be present. + # To fulfill this AND condition, we count the number of occurrences of that value condition + number_values_found = 0 + # Runtime contribution: O(m), where is is the number of search keys + for v in value: + # Test for simple values (not-nodes) + if v in attr_key: + number_values_found += 1 + + # Test if value is present in one of the specified attributes (OR condition) + # Runtime contribution: O(m), where m is the number of nodes in the attribute list. + for attr in attr_key: + # if the attribute is a node and the search value is a dictionary, + # we can verify that this condition is met if it finds the node itself with `find_children`. + if isinstance(attr, BaseNode) and isinstance(v, dict): + # Since we only want to test the node itself and not any of its children, we set recursion to 0. + # Runtime contribution: recursive call, with depth search depth of the search dictionary O(h) + if len(attr.find_children(v, 0)) > 0: + number_values_found += 1 + # Since this an OR condition, we abort early. + # This also doesn't inflate the number_values_count, + # since every OR condition should only add a max of 1. + break + # Check if the AND condition of the values is met + return number_values_found == len(value) + + if handled_nodes is None: + handled_nodes = [] + + # Protect against cycles in graph, by handling every instance of a node only once + if self in handled_nodes: + return [] + handled_nodes += [self] + + found_children = [] + + # In this search we include the calling node itself. + # We check for this node if all specified attributes are present by counting them (AND condition). + found_attr = 0 + for key, value in search_attr.items(): + if is_attr_present(self, key, value): + found_attr += 1 + # If exactly all attributes are found, it matches the search criterion + if found_attr == len(search_attr): + found_children += [self] + + # Recursion according to the recursion depth for all node children. + if search_depth != 0: + # Loop over all attributes, runtime contribution (none, or constant (max number of attributes of a node) + for field in self._json_attrs.__dataclass_fields__: + value = getattr(self._json_attrs, field) + # To save code paths, I convert non-lists into lists with one element. 
+ if not isinstance(value, list): + value = [value] + # Run time contribution: number of elements in the attribute list. + for v in value: + try: # Try every attribute for recursion (duck-typing) + found_children += v.find_children(search_attr, search_depth - 1, handled_nodes=handled_nodes) + except AttributeError: + pass + # Total runtime, of non-recursive call: O(m*h) + O(k) where k is the number of children for this node, + # h being the depth of the search dictionary, m being the number of nodes in the attribute list. + # Total runtime, with recursion: O(n*(k+m*h). A full graph traversal O(n) with a cost per node, that scales with the number of children per node and the search depth of the search dictionary. + return found_children + + def remove_child(self, child) -> bool: + """ + This safely removes the first found child node from the parent. + This requires exact node as we test with `is` instead of `==`. + + returns True if child was found and deleted, False if child not found, + raise DB schema exception if deletion violates DB schema. + """ + + # If we delete a child, we have to replace that with a default value. + # The easiest way to access this default value is to get it from the the default JsonAttribute of that class + default_json_attrs = self.JsonAttributes() + new_attrs = self._json_attrs + for field in self._json_attrs.__dataclass_fields__: + value = getattr(self._json_attrs, field) + if value is child: + new_attrs = replace(new_attrs, **{field: getattr(default_json_attrs, field)}) + # We only want to delete the first found child + elif not isinstance(value, str): # Strings are iterable, but we don't want them + try: # Try if we are facing a list at the moment + new_attr_list = [element for element in value] + except TypeError: + pass # It is OK if this field is not a list + else: + found_child = False + for i, list_value in enumerate(value): + if list_value is child: + found_child = True + del new_attr_list[i] + # Only delete first child. + # Important to break loop here, since value and new_attr_list are not identical any more. + if found_child: + new_attrs = replace(new_attrs, **{field: new_attr_list}) + # Again only first found place is removed + break + # Let's see if we found the child aka the new_attrs are different than the old ones + if new_attrs is self._json_attrs: + return False + self._update_json_attrs_if_valid(new_attrs) + return True diff --git a/src/cript/nodes/exceptions.py b/src/cript/nodes/exceptions.py new file mode 100644 index 000000000..58f31fdb3 --- /dev/null +++ b/src/cript/nodes/exceptions.py @@ -0,0 +1,411 @@ +from abc import ABC, abstractmethod +from typing import List + +from cript.exceptions import CRIPTException + + +class CRIPTNodeSchemaError(CRIPTException): + """ + ## Definition + This error is raised when the CRIPT [json database schema](https://json-schema.org/) + validation fails for a node. + + Please keep in mind that the CRIPT Python SDK converts all the Python nodes inside the + [Project](../../nodes/primary_nodes/project) into a giant JSON + and sends an HTTP `POST` or `PATCH` request to the API to be processed. + + However, before a request is sent to the API, the JSON is validated against API database schema + via the [JSON Schema library](https://python-jsonschema.readthedocs.io/en/stable/), + and if the database schema validation fails for whatever reason this error is shown. + + ### Possible Reasons + + 1. There was a mistake in nesting of the nodes + 1. There was a mistake in creating the nodes + 1. Nodes are missing + 1. 
Nodes have invalid vocabulary + * The database schema wants something a different controlled vocabulary than what is provided + 1. There was an error with the way the JSON was created within the Python SDK + * The format of the JSON the CRIPT Python SDK created was invalid + 1. There is something wrong with the database schema + + ## How to Fix + The easiest way to troubleshoot this is to examine the JSON that the SDK created via printing out the + [Project](../../nodes/primary_nodes/project) node's JSON and checking the place that the schema validation + says failed + + ### Example + ```python + print(my_project.json) + ``` + """ + + node_type: str = "" + json_schema_validation_error: str = "" + + def __init__(self, node_type: str, json_schema_validation_error: str) -> None: + self.json_schema_validation_error: str = json_schema_validation_error + self.node_type = node_type + + def __str__(self) -> str: + error_message: str = f"JSON database schema validation for node {self.node_type} failed." + error_message += f"Error: {self.json_schema_validation_error}" + + return error_message + + +class CRIPTJsonDeserializationError(CRIPTException): + """ + ## Definition + This exception is raised when converting a node from JSON to Python class fails. + This process fails when the attributes within the JSON does not match the node's class + attributes within the `JsonAttributes` of that specific node + + ### Error Example + Invalid JSON that cannot be deserialized to a CRIPT Python SDK Node + + ```json + ``` + + + ### Valid Example + Valid JSON that can be deserialized to a CRIPT Python SDK Node + + ```json + ``` + + ## How to Fix + """ + + def __init__(self, node_type: str, json_str: str) -> None: + self.node_type = node_type + self.json_str = json_str + + def __str__(self) -> str: + return f"JSON deserialization failed for node type {self.node_type} with JSON str: {self.json_str}" + + +class CRIPTDeserializationUIDError(CRIPTException): + """ + ## Definition + This exception is raised when converting a node from JSON to Python class fails, + because a node is specified with its UID only, but not part of the data graph elsewhere. + + ### Error Example + Invalid JSON that cannot be deserialized to a CRIPT Python SDK Node + + ```json + { + "node": ["Algorithm"], + "key": "mc_barostat", + "type": "barostat", + "parameter": {"node": ["Parameter"], "uid": "uid-string"} + } + ``` + Here the algorithm has a parameter attribute, but the parameter is specified as uid only. + + ### Valid Example + Valid JSON that can be deserialized to a CRIPT Python SDK Node + + ```json + { + "node": ["Algorithm"], + "key": "mc_barostat", + "type": "barostat", + "parameter": {"node": ["Parameter"], "uid": "uid-string", + "key": "update_frequency", "value":1, "unit": "1/second"} + } + ``` + Now the node is fully specified. + + ## How to Fix + Specify the full node instead. This error might appear if you try to partially load previously generated JSON. + """ + + def __init__(self, node_type: str, uid: str) -> None: + self.node_type = node_type + self.uid = uid + + def __str__(self) -> str: + return f"JSON deserialization failed for node type {self.node_type} with unknown UID: {self.uid}" + + +class CRIPTJsonNodeError(CRIPTJsonDeserializationError): + """ + ## Definition + This exception is raised if a `node` attribute is present in JSON, + but the list has more or less than exactly one type of node type. + + > Note: It is expected that there is only a single node type per JSON object. + + ### Example + !!! 
Example "Valid JSON representation of a Material node"
+        ```json
+        {
+            "node": [
+                "Material"
+            ],
+            "name": "Whey protein isolate",
+            "uid": "_:Whey protein isolate"
+        },
+        ```
+
+    ??? Example "Invalid JSON representation of a Material node"
+
+        ```json
+        {
+            "node": [
+                "Material",
+                "Property"
+            ],
+            "name": "Whey protein isolate",
+            "uid": "_:Whey protein isolate"
+        },
+        ```
+
+        ---
+
+        ```json
+        {
+            "node": [],
+            "name": "Whey protein isolate",
+            "uid": "_:Whey protein isolate"
+        },
+        ```
+
+
+    ## How to Fix
+    Debugging skills are most helpful here as there is no one-size-fits-all approach.
+
+    It is best to identify whether the invalid JSON was created in the Python SDK
+    or whether the invalid JSON came from the API.
+
+    If the Python SDK created invalid JSON during serialization, then it is helpful to track down and
+    identify the point where the invalid JSON was first created.
+
+    You may consider inspecting the Python objects to see whether the node type is written incorrectly in Python
+    and the issue is only caught during serialization, or whether the Python node is written correctly
+    and the issue arises during serialization.
+
+    If the problem is with the Python SDK or API, it is best to open an issue or create a discussion within the
+    [Python SDK GitHub repository](https://github.com/C-Accel-CRIPT/Python-SDK) so that a member of the
+    CRIPT team can look into it.
+    """
+
+    def __init__(self, node_list: List, json_str: str) -> None:
+        self.node_list = node_list
+        self.json_str = json_str
+
+    def __str__(self) -> str:
+        error_message: str = f"The 'node' attribute in the JSON string must be a single element list with the node name, " f"such as `'node': ['Material']`. The `node` attribute provided was: `{self.node_list}`. " f"The full JSON was: {self.json_str}."
+
+        return error_message
+
+
+class CRIPTJsonSerializationError(CRIPTException):
+    """
+    ## Definition
+    This exception is raised if serialization of a node from a Python object to JSON fails.
+
+    ## How to Fix
+    """
+
+    def __init__(self, node_type: str, json_dict: str) -> None:
+        self.node_type = node_type
+        self.json_str = str(json_dict)
+
+    def __str__(self) -> str:
+        return f"JSON Serialization failed for node type {self.node_type} with JSON dict: {self.json_str}"
+
+
+class CRIPTAttributeModificationError(CRIPTException):
+    """
+    Exception that is raised when a node attribute that wasn't intended to be modified is modified.
+    """
+
+    def __init__(self, name, key, value):
+        self.name = name
+        self.key = key
+        self.value = value
+
+    def __str__(self):
+        return (
+            f"Attempt to modify an attribute of a node ({self.name}) that wasn't intended to be modified.\n"
+            f"Here, an attempt was made to modify the non-existent attribute {self.key} of {self.name}.\n"
+            "Most likely this is due to a typo in the attribute that was intended to be modified, e.g. `project.materials` instead of `project.material`.\n"
+            "To ensure compatibility with the underlying CRIPT data model we do not allow custom attributes.\n"
+        )
+
+
+class CRIPTExtraJsonAttributes(CRIPTException):
+    def __init__(self, name_type: str, extra_attribute: str):
+        self.name_type = name_type
+        self.extra_attribute = extra_attribute
+
+    def __str__(self):
+        return (
+            f"During the construction of a node {self.name_type} an additional attribute {self.extra_attribute} was detected.\n"
+            "This might be a typo or an extra argument delivered from the back end.\n"
+            f"In the latter case, you can disable this error temporarily by calling `cript.add_tolerated_extra_json('{self.extra_attribute}')`.\n"
+        )
+
+
+class CRIPTOrphanedNodesError(CRIPTException, ABC):
+    """
+    ## Definition
+    This error is raised when a child node is not attached to the
+    appropriate parent node. For example, all material nodes used
+    within a project must either belong to one of the project's inventories or be explicitly listed as a material of that project.
+    If the validation code finds a material node that is used within a project but is not part of an
+    inventory or the project's material list, it raises a `CRIPTOrphanedNodesError`.
+
+    ## How To Fix
+    Take the node that the CRIPT Python SDK found a problem with and associate it
+    with the appropriate parent, for example via
+
+    ```
+    my_project.material += [my_orphaned_material_node]
+    ```
+    """
+
+    def __init__(self, orphaned_node):
+        self.orphaned_node = orphaned_node
+
+    @abstractmethod
+    def __str__(self):
+        pass
+
+
+class CRIPTOrphanedMaterialError(CRIPTOrphanedNodesError):
+    """
+    ## Definition
+    CRIPTOrphanedNodesError, but specific to orphaned materials.
+
+    ## How To Fix
+    Handle this error by adding the orphaned material nodes to the parent project or one of its inventories.
+    """
+
+    def __init__(self, orphaned_node):
+        from cript.nodes.primary_nodes.material import Material
+
+        assert isinstance(orphaned_node, Material)
+        super().__init__(orphaned_node)
+
+    def __str__(self):
+        ret_string = "While validating a project graph, an orphaned material node was found. "
+        ret_string += "This material is present in the graph, but not listed in the project. "
+        ret_string += "Please add the node like: `my_project.material += [orphaned_material]`. "
+        ret_string += f"The orphaned material was {self.orphaned_node}."
+        return ret_string
+
+
+class CRIPTOrphanedExperimentError(CRIPTOrphanedNodesError):
+    """
+    ## Definition
+    CRIPTOrphanedNodesError, but specific to orphaned nodes that should be listed in one of the experiments.
+
+    ## How To Fix
+    Handle this error by adding the orphaned node to one of the parent project's experiments.
+    """
+
+    def __init__(self, orphaned_node):
+        super().__init__(orphaned_node)
+
+    def __str__(self) -> str:
+        node_name = self.orphaned_node.node_type.lower()
+        ret_string = f"While validating a project graph, an orphaned {node_name} node was found. "
+        ret_string += f"This {node_name} node is present in the graph, but not listed in any of the experiments of the project. "
+        ret_string += f"Please add the node like: `your_experiment.{node_name} += [orphaned_{node_name}]`. "
+        ret_string += f"The orphaned {node_name} was {self.orphaned_node}."
+        return ret_string
+
+
+def get_orphaned_experiment_exception(orphaned_node):
+    """
+    Return the correct specific exception, based on the orphaned node's type, for nodes not correctly listed in an experiment.
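+
+    Example
+    -------
+    A minimal usage sketch; `orphaned_data_node` is a hypothetical `Data` node that was found in the
+    project graph but is not listed in any experiment.
+
+    ```python
+    # raise the most specific orphaned-node error available for this node type
+    raise get_orphaned_experiment_exception(orphaned_data_node)
+    ```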
+    """
+    from cript.nodes.primary_nodes.computation import Computation
+    from cript.nodes.primary_nodes.computation_process import ComputationProcess
+    from cript.nodes.primary_nodes.data import Data
+    from cript.nodes.primary_nodes.process import Process
+
+    if isinstance(orphaned_node, Data):
+        return CRIPTOrphanedDataError(orphaned_node)
+    if isinstance(orphaned_node, Process):
+        return CRIPTOrphanedProcessError(orphaned_node)
+    if isinstance(orphaned_node, Computation):
+        return CRIPTOrphanedComputationError(orphaned_node)
+    if isinstance(orphaned_node, ComputationProcess):
+        return CRIPTOrphanedComputationalProcessError(orphaned_node)
+    # Base case: raise the parent exception. TODO add bug warning.
+    return CRIPTOrphanedExperimentError(orphaned_node)
+
+
+class CRIPTOrphanedDataError(CRIPTOrphanedExperimentError):
+    """
+    ## Definition
+    CRIPTOrphanedExperimentError, but specific to an orphaned Data node that should be listed in one of the experiments.
+
+    ## How To Fix
+    Handle this error by adding the orphaned node to the `data` attribute of one of the parent project's experiments.
+    """
+
+    def __init__(self, orphaned_node):
+        from cript.nodes.primary_nodes.data import Data
+
+        assert isinstance(orphaned_node, Data)
+        super().__init__(orphaned_node)
+
+
+class CRIPTOrphanedProcessError(CRIPTOrphanedExperimentError):
+    """
+    ## Definition
+    CRIPTOrphanedExperimentError, but specific to an orphaned Process node that should be
+    listed in one of the experiments.
+
+    ## How To Fix
+    Handle this error by adding the orphaned node to the `process` attribute of one of the
+    parent project's experiments.
+    """
+
+    def __init__(self, orphaned_node):
+        from cript.nodes.primary_nodes.process import Process
+
+        assert isinstance(orphaned_node, Process)
+        super().__init__(orphaned_node)
+
+
+class CRIPTOrphanedComputationError(CRIPTOrphanedExperimentError):
+    """
+    ## Definition
+    CRIPTOrphanedExperimentError, but specific to an orphaned Computation node that should be
+    listed in one of the experiments.
+
+    ## How To Fix
+    Handle this error by adding the orphaned node to the `computation` attribute of one of the
+    parent project's experiments.
+    """
+
+    def __init__(self, orphaned_node):
+        from cript.nodes.primary_nodes.computation import Computation
+
+        assert isinstance(orphaned_node, Computation)
+        super().__init__(orphaned_node)
+
+
+class CRIPTOrphanedComputationalProcessError(CRIPTOrphanedExperimentError):
+    """
+    ## Definition
+    CRIPTOrphanedExperimentError, but specific to an orphaned ComputationProcess
+    node that should be listed in one of the experiments.
+
+    ## How To Fix
+    Handle this error by adding the orphaned node to the `computation_process` attribute of one of the
+    parent project's experiments.
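+
+    For example (a sketch; `my_experiment` and `my_orphaned_computation_process` are assumed to already
+    exist, and the experiment is assumed to expose a `computation_process` list attribute):
+
+    ```python
+    # attach the orphaned ComputationProcess node to one of the project's experiments
+    my_experiment.computation_process += [my_orphaned_computation_process]
+    ```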
+ """ + + def __init__(self, orphaned_node): + from cript.nodes.primary_nodes.computation_process import ComputationProcess + + assert isinstance(orphaned_node, ComputationProcess) + super().__init__(orphaned_node) diff --git a/src/cript/nodes/primary_nodes/__init__.py b/src/cript/nodes/primary_nodes/__init__.py new file mode 100644 index 000000000..0ac298ab8 --- /dev/null +++ b/src/cript/nodes/primary_nodes/__init__.py @@ -0,0 +1,11 @@ +# trunk-ignore-all(ruff/F401) +from cript.nodes.primary_nodes.collection import Collection +from cript.nodes.primary_nodes.computation import Computation +from cript.nodes.primary_nodes.computation_process import ComputationProcess +from cript.nodes.primary_nodes.data import Data +from cript.nodes.primary_nodes.experiment import Experiment +from cript.nodes.primary_nodes.inventory import Inventory +from cript.nodes.primary_nodes.material import Material +from cript.nodes.primary_nodes.process import Process +from cript.nodes.primary_nodes.project import Project +from cript.nodes.primary_nodes.reference import Reference diff --git a/src/cript/nodes/primary_nodes/collection.py b/src/cript/nodes/primary_nodes/collection.py new file mode 100644 index 000000000..cd13a0f4e --- /dev/null +++ b/src/cript/nodes/primary_nodes/collection.py @@ -0,0 +1,284 @@ +from dataclasses import dataclass, field, replace +from typing import Any, List, Optional + +from beartype import beartype + +from cript.nodes.primary_nodes.primary_base_node import PrimaryBaseNode +from cript.nodes.supporting_nodes import User + + +class Collection(PrimaryBaseNode): + """ + ## Definition + + A + [Collection node](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=8) + is nested inside a [Project](../project) node. + + A Collection node can be thought as a folder/bucket that can hold [experiment](../experiment) + or [Inventories](../inventory) node. 
+ + | attribute | type | example | description | + |------------|------------------|---------------------|--------------------------------------------------------------------------------| + | experiment | list[Experiment] | | experiment that relate to the collection | + | inventory | list[Inventory] | | inventory owned by the collection | + | doi | str | `10.1038/1781168a0` | DOI: digital object identifier for a published collection; CRIPT generated DOI | + | citation | list[Citation] | | reference to a book, paper, or scholarly work | + + + ## JSON Representation + ```json + { + "name": "my collection JSON", + "node":["Collection"], + "uid":"_:fccd3549-07cb-4e23-ba79-323597ec9bfd", + "uuid":"fccd3549-07cb-4e23-ba79-323597ec9bfd" + + "experiment":[ + { + "name":"my experiment name", + "node":["Experiment"], + "uid":"_:8256b75b-1f4e-4f69-9fe6-3bcb2298e470", + "uuid":"8256b75b-1f4e-4f69-9fe6-3bcb2298e470" + } + ], + "inventory":[], + "citation":[], + } + ``` + + + """ + + @dataclass(frozen=True) + class JsonAttributes(PrimaryBaseNode.JsonAttributes): + """ + all Collection attributes + """ + + # TODO add proper typing in future, using Any for now to avoid circular import error + member: List[User] = field(default_factory=list) + admin: List[User] = field(default_factory=list) + experiment: List[Any] = field(default_factory=list) + inventory: List[Any] = field(default_factory=list) + doi: str = "" + citation: List[Any] = field(default_factory=list) + + _json_attrs: JsonAttributes = JsonAttributes() + + @beartype + def __init__(self, name: str, experiment: Optional[List[Any]] = None, inventory: Optional[List[Any]] = None, doi: str = "", citation: Optional[List[Any]] = None, notes: str = "", **kwargs) -> None: + """ + create a Collection with a name + add list of experiment, inventory, citation, doi, and notes if available. 
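+
+        Examples
+        --------
+        A minimal sketch based on this constructor; `my_experiment` is assumed to be an
+        Experiment node created elsewhere.
+
+        ```python
+        my_collection = cript.Collection(
+            name="my collection name",
+            experiment=[my_experiment],
+            doi="10.1038/1781168a0",
+        )
+        ```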
+ + Parameters + ---------- + name: str + name of the Collection you want to make + experiment: Optional[List[Experiment]], default=None + list of experiment within the Collection + inventory: Optional[List[Inventory]], default=None + list of inventories within this collection + doi: str = "", default="" + cript doi + citation: Optional[List[Citation]], default=None + List of citations for this collection + + Returns + ------- + None + Instantiates a Collection node + """ + super().__init__(name=name, notes=notes, **kwargs) + + if experiment is None: + experiment = [] + + if inventory is None: + inventory = [] + + if citation is None: + citation = [] + + self._json_attrs = replace( + self._json_attrs, + name=name, + experiment=experiment, + inventory=inventory, + doi=doi, + citation=citation, + ) + + self.validate() + + @property + @beartype + def member(self) -> List[User]: + return self._json_attrs.member.copy() + + @property + @beartype + def admin(self) -> List[User]: + return self._json_attrs.admin + + @property + @beartype + def experiment(self) -> List[Any]: + """ + List of all [experiment](../experiment) within this Collection + + Examples + -------- + ```python + my_collection.experiment = [my_first_experiment] + ``` + + Returns + ------- + List[Experiment] + list of all [experiment](../experiment) within this Collection + """ + return self._json_attrs.experiment.copy() # type: ignore + + @experiment.setter + @beartype + def experiment(self, new_experiment: List[Any]) -> None: + """ + sets the Experiment list within this collection + + Parameters + ---------- + new_experiment: List[Experiment] + list of experiment + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, experiment=new_experiment) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def inventory(self) -> List[Any]: + """ + List of [inventory](../inventory) that belongs to this collection + + Examples + -------- + ```python + material_1 = cript.Material( + name="material 1", + identifiers=[{"alternative_names": "material 1 alternative name"}], + ) + + material_2 = cript.Material( + name="material 2", + identifiers=[{"alternative_names": "material 2 alternative name"}], + ) + + my_inventory = cript.Inventory( + name="my inventory name", materials_list=[material_1, material_2] + ) + + my_collection.inventory = [my_inventory] + ``` + + Returns + ------- + inventory: List[Inventory] + list of inventories in this collection + """ + return self._json_attrs.inventory.copy() # type: ignore + + @inventory.setter + @beartype + def inventory(self, new_inventory: List[Any]) -> None: + """ + Sets the List of inventories within this collection to a new list + + Parameters + ---------- + new_inventory: List[Inventory] + new list of inventories for the collection to overwrite the current list + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, inventory=new_inventory) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def doi(self) -> str: + """ + The CRIPT DOI for this collection + + ```python + my_collection.doi = "10.1038/1781168a0" + ``` + + Returns + ------- + doi: str + the CRIPT DOI e.g. 
`10.1038/1781168a0` + """ + return self._json_attrs.doi + + @doi.setter + @beartype + def doi(self, new_doi: str) -> None: + """ + set the CRIPT DOI for this collection to new CRIPT DOI + + Parameters + ---------- + new_doi: str + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, doi=new_doi) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def citation(self) -> List[Any]: + """ + List of Citations within this Collection + + Examples + -------- + ```python + my_citation = cript.Citation(type="derived_from", reference=simple_reference_node) + + my_collections.citation = my_citations + ``` + + Returns + ------- + citation: List[Citation]: + list of Citations within this Collection + """ + return self._json_attrs.citation.copy() # type: ignore + + @citation.setter + @beartype + def citation(self, new_citation: List[Any]) -> None: + """ + set the list of citations for this Collection + + Parameters + ---------- + new_citation: List[Citation] + set the list of citations for this Collection + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, citation=new_citation) + self._update_json_attrs_if_valid(new_attrs) diff --git a/src/cript/nodes/primary_nodes/computation.py b/src/cript/nodes/primary_nodes/computation.py new file mode 100644 index 000000000..435eeaa47 --- /dev/null +++ b/src/cript/nodes/primary_nodes/computation.py @@ -0,0 +1,456 @@ +from dataclasses import dataclass, field, replace +from typing import Any, List, Optional + +from beartype import beartype + +from cript.nodes.primary_nodes.primary_base_node import PrimaryBaseNode + + +class Computation(PrimaryBaseNode): + """ + ## Definition + + The + [Computation node](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=14) + describes the transformation of data or the creation of a computational data + set. + + **Common computations for simulations** are energy minimization, annealing, quenching, or + NPT/NVT (isothermal-isobaric/canonical ensemble) simulations. + + **Common computations for experimental** data include fitting a reaction model to kinetic data + to determine rate constants, a plateau modulus from a time-temperature-superposition, or calculating radius of + gyration with the Debye function from small angle scattering data. 
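+
+    For instance, two Computation nodes can be chained through `prerequisite_computation`
+    (a minimal sketch; the node names are illustrative and the `type` value "analysis" is simply
+    reused from the constructor example below):
+
+    ```python
+    equilibration = cript.Computation(name="NPT equilibration", type="analysis")
+
+    analysis = cript.Computation(
+        name="radius of gyration analysis",
+        type="analysis",
+        prerequisite_computation=equilibration,
+    )
+    ```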
+ + + + ## Attributes + | attribute | type | example | description | required | vocab | + |--------------------------|-------------------------------|---------------------------------------|-----------------------------------------------|----------|-------| + | type | str | general molecular dynamics simulation | category of computation | True | True | + | input_data | list[Data] | | input data nodes | | | + | output_data | list[Data] | | output data nodes | | | + | software_ configurations | list[Software Configuration] | | software and algorithms used | | | + | condition | list[Condition] | | setup information | | | + | prerequisite_computation | Computation | | prior computation method in chain | | | + | citation | list[Citation] | | reference to a book, paper, or scholarly work | | | + | notes | str | | additional description of the step | | | + + ## JSON Representation + ```json + { + "name":"my computation name", + "node":["Computation"], + "type":"analysis", + "uid":"_:69f29bec-e30a-4932-b78d-2e4585b37d74", + "uuid":"69f29bec-e30a-4932-b78d-2e4585b37d74" + "citation":[], + } + ``` + + + ## Available Subobjects + * [Software Configuration](../../subobjects/software_configuration) + * [Condition](../../subobjects/condition) + * [Citation](../../subobjects/citation) + + """ + + @dataclass(frozen=True) + class JsonAttributes(PrimaryBaseNode.JsonAttributes): + """ + all computation nodes attributes + """ + + type: str = "" + # TODO add proper typing in future, using Any for now to avoid circular import error + input_data: List[Any] = field(default_factory=list) + output_data: List[Any] = field(default_factory=list) + software_configuration: List[Any] = field(default_factory=list) + condition: List[Any] = field(default_factory=list) + prerequisite_computation: Optional["Computation"] = None + citation: List[Any] = field(default_factory=list) + + _json_attrs: JsonAttributes = JsonAttributes() + + @beartype + def __init__( + self, + name: str, + type: str, + input_data: Optional[List[Any]] = None, + output_data: Optional[List[Any]] = None, + software_configuration: Optional[List[Any]] = None, + condition: Optional[List[Any]] = None, + prerequisite_computation: Optional["Computation"] = None, + citation: Optional[List[Any]] = None, + notes: str = "", + **kwargs + ) -> None: + """ + create a computation node + + Parameters + ---------- + name: str + name of computation node + type: str + type of computation node. 
Computation type must come from CRIPT controlled vocabulary + input_data: List[Data] default=None + input data (data node) + output_data: List[Data] default=None + output data (data node) + software_configuration: List[SoftwareConfiguration] default=None + software configuration of computation node + condition: List[Condition] default=None + condition for the computation node + prerequisite_computation: Computation default=None + prerequisite computation + citation: List[Citation] default=None + list of citations + notes: str = "" + any notes for this computation node + **kwargs + for internal use of deserialize JSON from API to node + + Examples + -------- + ```python + my_computation = cript.Computation(name="my computation name", type="analysis") + ``` + + Returns + ------- + None + instantiate a computation node + + """ + super().__init__(name=name, notes=notes, **kwargs) + + if input_data is None: + input_data = [] + + if output_data is None: + output_data = [] + + if software_configuration is None: + software_configuration = [] + + if condition is None: + condition = [] + + if citation is None: + citation = [] + + self._json_attrs = replace( + self._json_attrs, + type=type, + input_data=input_data, + output_data=output_data, + software_configuration=software_configuration, + condition=condition, + prerequisite_computation=prerequisite_computation, + citation=citation, + ) + + self.validate() + + # ------------------ Properties ------------------ + + @property + @beartype + def type(self) -> str: + """ + The type of computation + + The [computation type](https://app.criptapp.org/vocab/computation_type) + must come from CRIPT controlled vocabulary + + Examples + -------- + ```python + my_computation.type = type="analysis" + ``` + + Returns + ------- + str + type of computation + """ + return self._json_attrs.type + + @type.setter + @beartype + def type(self, new_computation_type: str) -> None: + """ + set the computation type + + the computation type must come from CRIPT controlled vocabulary + + Parameters + ---------- + new_computation_type: str + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, type=new_computation_type) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def input_data(self) -> List[Any]: + """ + List of input data (data nodes) for this node + + Examples + -------- + ```python + # create file node + my_file = cript.File( + source="https://criptapp.org", + type="calibration", + extension=".csv", + data_dictionary="my file's data dictionary" + ) + + # create a data node + my_input_data = cript.Data(name="my data name", type="afm_amp", files=[my_file]) + + my_computation.input_data = [my_input_data] + ``` + + Returns + ------- + List[Data] + list of input data for this computation + """ + return self._json_attrs.input_data.copy() + + @input_data.setter + @beartype + def input_data(self, new_input_data_list: List[Any]) -> None: + """ + set the input data list + + Parameters + ---------- + new_input_data_list: List[Data] + list of input data (data nodes) to replace the current + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, input_data=new_input_data_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def output_data(self) -> List[Any]: + """ + List of output data (data nodes) + + Examples + -------- + ```python + # create file node + my_file = cript.File( + source="https://criptapp.org", + type="calibration", + extension=".csv", + data_dictionary="my file's data 
dictionary" + ) + + # create a data node + my_output_data = cript.Data(name="my data name", type="afm_amp", files=[my_file]) + + my_computation.output_data = [my_output_data] + ``` + + Returns + ------- + List[Data] + list of output data for this computation + """ + return self._json_attrs.output_data.copy() + + @output_data.setter + @beartype + def output_data(self, new_output_data_list: List[Any]) -> None: + """ + set the list of output data (data nodes) for this node + + Parameters + ---------- + new_output_data_list: List[Data] + replace the current list of output data for this node + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, output_data=new_output_data_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def software_configuration(self) -> List[Any]: + """ + List of software_configuration for this computation node + + Examples + -------- + ```python + # create software configuration node + my_software_configuration = cript.SoftwareConfiguration(software=simple_software_node) + + my_computation.software_configuration = my_software_configuration + ``` + + Returns + ------- + List[SoftwareConfiguration] + list of software configurations + """ + return self._json_attrs.software_configuration.copy() + + @software_configuration.setter + @beartype + def software_configuration(self, new_software_configuration_list: List[Any]) -> None: + """ + set the list of software_configuration for this computation node + + Parameters + ---------- + new_software_configuration_list: List[software_configuration] + new_software_configuration_list to replace the current one + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, software_configuration=new_software_configuration_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def condition(self) -> List[Any]: + """ + List of condition for this computation node + + Examples + -------- + ```python + # create a condition node + my_condition = cript.Condition(key="atm", type="min", value=1) + + my_computation.condition = my_condition + ``` + + Returns + ------- + List[Condition] + list of condition for the computation node + """ + return self._json_attrs.condition.copy() + + @condition.setter + @beartype + def condition(self, new_condition_list: List[Any]) -> None: + """ + set the list of condition for this node + + Parameters + ---------- + new_condition_list: List[Condition] + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, condition=new_condition_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def prerequisite_computation(self) -> Optional["Computation"]: + """ + prerequisite computation + + Examples + -------- + ```python + # create computation node for prerequisite_computation + my_prerequisite_computation = cript.Computation(name="my prerequisite computation name", type="data_fit") + + my_computation.prerequisite_computation = my_prerequisite_computation + ``` + + Returns + ------- + Computation + prerequisite computation + """ + return self._json_attrs.prerequisite_computation + + @prerequisite_computation.setter + @beartype + def prerequisite_computation(self, new_prerequisite_computation: Optional["Computation"]) -> None: + """ + set new prerequisite_computation + + Parameters + ---------- + new_prerequisite_computation: "Computation" + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, prerequisite_computation=new_prerequisite_computation) + 
self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def citation(self) -> List[Any]: + """ + List of citations + + Examples + -------- + ```python + # create a reference node for the citation + my_reference = cript.Reference(type="journal_article", title="'Living' Polymers") + + # create a reference + my_citation = cript.Citation(type="derived_from", reference=my_reference) + + my_computation.citation = [my_citation] + ``` + + Returns + ------- + List[Citation] + list of citations for this computation node + """ + return self._json_attrs.citation.copy() # type: ignore + + @citation.setter + @beartype + def citation(self, new_citation_list: List[Any]) -> None: + """ + set the List of citations + + Parameters + ---------- + new_citation_list: List[Citation] + list of citations for this computation node + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, citation=new_citation_list) + self._update_json_attrs_if_valid(new_attrs) diff --git a/src/cript/nodes/primary_nodes/computation_process.py b/src/cript/nodes/primary_nodes/computation_process.py new file mode 100644 index 000000000..56e1ad2cb --- /dev/null +++ b/src/cript/nodes/primary_nodes/computation_process.py @@ -0,0 +1,589 @@ +from dataclasses import dataclass, field, replace +from typing import Any, List, Optional + +from beartype import beartype + +from cript.nodes.primary_nodes.primary_base_node import PrimaryBaseNode + + +class ComputationProcess(PrimaryBaseNode): + """ + ## Definition + + A + [Computational_Process](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=15) + is a simulation that processes or changes a virtual material. Examples + include simulations of chemical reactions, chain scission, cross-linking, strong shear, etc. A + computational process may also encapsulate any computation that dramatically changes the + materials properties, molecular topology, and physical aspects like molecular orientation, etc. The + computation_forcefield of a simulation is associated with a material. As a consequence, if the + forcefield changes or gets refined via a computational procedure (density functional theory, + iterative Boltzmann inversion for coarse-graining etc.) this forcefield changing step must be + described as a computational_process and a new material node with a different + computation_forcefield needs to be created. 
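+
+    As a sketch of that last point (names are illustrative; `trajectory_data` and `atomistic_ingredient`
+    are assumed to have been built as in the constructor example further below, and the `type` value
+    must come from the controlled vocabulary):
+
+    ```python
+    # the forcefield-refinement step (e.g. iterative Boltzmann inversion) is itself a ComputationProcess
+    coarse_graining = cript.ComputationProcess(
+        name="iterative Boltzmann inversion",
+        type="cross_linking",  # placeholder vocabulary value reused from the example below
+        input_data=[trajectory_data],
+        ingredient=[atomistic_ingredient],
+    )
+
+    # the refined description lives on a *new* Material node, which then carries
+    # its own computational_forcefield subobject (not constructed in this sketch)
+    coarse_grained_material = cript.Material(
+        name="polystyrene (coarse-grained)",
+        identifiers=[{"alternative_names": "CG polystyrene"}],
+    )
+    ```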
+ + ## Attributes + | attribute | type | example | description | required | vocab | + |--------------------------|-------------------------------|---------------------------------------|-------------------------------------------------|----------|-------| + | type | str | general molecular dynamics simulation | category of computation | True | True | + | input_data | list[Data] | | input data nodes | True | | + | output_data | list[Data] | | output data nodes | | | + | ingredient | list[Ingredient] | | ingredients | True | | + | software_ configurations | list[Software Configuration] | | software and algorithms used | | | + | condition | list[Condition] | | setup information | | | + | property | list[Property] | | computation process properties | | | + | citation | list[Citation] | | reference to a book, paper, or scholarly work | | | + | notes | str | | additional description of the step | | | + + + ## Available Subobjects + * [ingredient](../../subobjects/ingredient) + * [software_configuration](../../subobjects/software_configuration) + * [property](../../subobjects/property) + * [condition](../../subobjects/condition) + * [citation](../../subobjects/citation) + + ## JSON Representation + ```json + { + "name":"my computational process node name", + "node":["ComputationProcess"], + "type":"cross_linking", + "uid":"_:b88ac0a5-b5c0-4197-a63d-b37e1fe8c6c6", + "uuid":"b88ac0a5-b5c0-4197-a63d-b37e1fe8c6c6" + "ingredient":[ + { + "node":["Ingredient"], + "uid":"_:f68d6fff-9327-48b1-9249-33ce498005e8", + "uuid":"f68d6fff-9327-48b1-9249-33ce498005e8" + "keyword":["catalyst"], + "material":{ + "name":"my material name", + "node":["Material"], + "uid":"_:3b12f92c-2121-4520-920e-b4c5622de34a", + "uuid":"3b12f92c-2121-4520-920e-b4c5622de34a", + "bigsmiles":"[H]{[>][<]C(C[>])c1ccccc1[]}", + }, + + "quantity":[ + { + "key":"mass", + "node":["Quantity"], + "uid":"_:07c4a6a9-9385-4505-a30a-ca3549cedcd8", + "uuid":"07c4a6a9-9385-4505-a30a-ca3549cedcd8", + "uncertainty":0.2, + "uncertainty_type":"stdev", + "unit":"kg", + "value":11.2 + } + ] + } + ], + "input_data":[ + { + "name":"my data name", + "node":["Data"], + "type":"afm_amp", + "uid":"_:3c16bb05-ded1-4f52-9d02-c88c1a1de915", + "uuid":"3c16bb05-ded1-4f52-9d02-c88c1a1de915" + "file":[ + { + "name":"my file node name", + "node":["File"], + "source":"https://criptapp.org", + "type":"calibration", + "data_dictionary":"my file's data dictionary", + "extension":".csv", + "uid":"_:ee8153db-4108-49e4-8c5b-ffc26d4e6f71", + "uuid":"ee8153db-4108-49e4-8c5b-ffc26d4e6f71" + } + ], + } + ], + } + ``` + + """ + + @dataclass(frozen=True) + class JsonAttributes(PrimaryBaseNode.JsonAttributes): + """ + all computational_process nodes attributes + """ + + type: str = "" + # TODO add proper typing in future, using Any for now to avoid circular import error + input_data: List[Any] = field(default_factory=list) + output_data: List[Any] = field(default_factory=list) + ingredient: List[Any] = field(default_factory=list) + software_configuration: List[Any] = field(default_factory=list) + condition: List[Any] = field(default_factory=list) + property: List[Any] = field(default_factory=list) + citation: List[Any] = field(default_factory=list) + + _json_attrs: JsonAttributes = JsonAttributes() + + @beartype + def __init__( + self, + name: str, + type: str, + input_data: List[Any], + ingredient: List[Any], + output_data: Optional[List[Any]] = None, + software_configuration: Optional[List[Any]] = None, + condition: Optional[List[Any]] = None, + property: Optional[List[Any]] = 
None, + citation: Optional[List[Any]] = None, + notes: str = "", + **kwargs + ): + """ + create a computational_process node + + Examples + -------- + ```python + + # create file node for input data node + data_files = cript.File( + source="https://criptapp.org", + type="calibration", + extension=".csv", + data_dictionary="my file's data dictionary" + ) + + # create input data node + input_data = cript.Data(name="my data name", type="afm_amp", files=[data_files]) + + # Material node for Quantity node + my_material = cript.Material( + name="my material", + identifiers=[{"alternative_names": "my material alternative name"}] + ) + + # create quantity node + my_quantity = cript.Quantity(key="mass", value=1.23, unit="gram") + + # create ingredient node + ingredient = cript.Ingredient( + material=my_material, + quantities=[my_quantity], + ) + + # create computational process node + my_computational_process = cript.ComputationalProcess( + name="my computational process name", + type="cross_linking", + input_data=[input_data], + ingredient=[ingredient], + ) + ``` + + + Parameters + ---------- + name: str + computational process name + type: str + type of computation process from CRIPT controlled vocabulary + input_data: List[Data] + list of input data for computational process + ingredient: List[Ingredient] + list of ingredients for this computational process node + output_data: List[Data] default=None + list of output data for this computational process node + software_configuration: List[SoftwareConfiguration] default=None + list of software configurations for this computational process node + condition: List[Condition] default=None + list of condition for this computational process node + property: List[Property] default=None + list of properties for this computational process node + citation: List[Citation] default=None + list of citation for this computational process node + notes: str default="" + optional notes for the computational process node + + Returns + ------- + None + instantiate computationalProcess node + """ + super().__init__(name=name, notes=notes, **kwargs) + + # TODO validate type from vocab + + if input_data is None: + input_data = [] + + if ingredient is None: + ingredient = [] + + if output_data is None: + output_data = [] + + if software_configuration is None: + software_configuration = [] + + if condition is None: + condition = [] + + if property is None: + property = [] + + if citation is None: + citation = [] + + self._json_attrs = replace( + self._json_attrs, + type=type, + input_data=input_data, + ingredient=ingredient, + output_data=output_data, + software_configuration=software_configuration, + condition=condition, + property=property, + citation=citation, + ) + + # self.validate() + + @property + @beartype + def type(self) -> str: + """ + The [computational process type](https://app.criptapp.org/vocab/computational_process_type) + must come from CRIPT Controlled vocabulary + + Examples + -------- + ```python + my_computational_process.type = "DPD" + ``` + + Returns + ------- + str + computational process type + """ + return self._json_attrs.type + + @type.setter + @beartype + def type(self, new_type: str) -> None: + """ + set the computational_process type + + computational_process type must come from CRIPT controlled vocabulary + + Parameters + ---------- + new_type: str + new computational process type. 
+ computational process type must come from CRIPT controlled vocabulary + + Returns + ------- + None + """ + # TODO check computational_process type with CRIPT controlled vocabulary + new_attrs = replace(self._json_attrs, type=new_type) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def input_data(self) -> List[Any]: + """ + List of input data for the computational process node + + Examples + -------- + ```python + # create file node for the data node + my_file = cript.File( + source="https://criptapp.org", + type="calibration", + extension=".csv", + data_dictionary="my file's data dictionary" + ) + + # create input data node + my_input_data = cript.Data(name="my input data name", type="afm_amp", files=[my_file]) + + # set computational process data node + my_computation.input_data = my_input_data + ``` + + Returns + ------- + List[Data] + list of input data for this computational process node + """ + return self._json_attrs.input_data.copy() + + @input_data.setter + @beartype + def input_data(self, new_input_data_list: List[Any]) -> None: + """ + set the input data for this computational process + + Parameters + ---------- + new_input_data_list: List[Data] + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, input_data=new_input_data_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def output_data(self) -> List[Any]: + """ + List of the output data for the computational_process + + Examples + -------- + ```python + # create file node for the data node + my_file = cript.File( + source="https://criptapp.org", + type="calibration", + extension=".csv", + data_dictionary="my file's data dictionary" + ) + + # create input data node + my_output_data = cript.Data(name="my output data name", type="afm_amp", files=[my_file]) + + # set computational process data node + my_computation.output_data = my_input_data + ``` + + Returns + ------- + List[Data] + list of output data from this computational process node + """ + return self._json_attrs.output_data.copy() + + @output_data.setter + @beartype + def output_data(self, new_output_data_list: List[Any]) -> None: + """ + set the output_data list for the computational_process + + Parameters + ---------- + new_output_data_list: List[Data] + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, output_data=new_output_data_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def ingredient(self) -> List[Any]: + """ + List of ingredients for the computational_process + + Examples + -------- + ```python + # create ingredient node + my_ingredient = cript.Ingredient( + material=simple_material_node, + quantities=[simple_quantity_node], + ) + + my_computational_process.ingredient = my_ingredient + ``` + + Returns + ------- + List[Ingredient] + list of ingredients for this computational process + """ + return self._json_attrs.ingredient.copy() + + @ingredient.setter + @beartype + def ingredient(self, new_ingredient_list: List[Any]) -> None: + """ + set the ingredients list for this computational process + + Parameters + ---------- + new_ingredient_list: List[Ingredient] + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, ingredient=new_ingredient_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def software_configuration(self) -> List[Any]: + """ + List of software_configuration for the computational process + + Examples + -------- + ```python + # create software configuration node + 
my_software_configuration = cript.SoftwareConfiguration(software=simple_software_node) + + my_computational_process.software_configuration = my_software_configuration + ``` + + Returns + ------- + List[SoftwareConfiguration] + List of software configurations used for this computational process node + """ + return self._json_attrs.software_configuration.copy() + + @software_configuration.setter + @beartype + def software_configuration(self, new_software_configuration_list: List[Any]) -> None: + """ + set the list of software_configuration for the computational process + + Parameters + ---------- + new_software_configuration_list: List[SoftwareConfiguration] + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, software_configuration=new_software_configuration_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def condition(self) -> List[Any]: + """ + List of condition for the computational process + + Examples + -------- + ```python + # create condition node + my_condition = cript.Condition(key="atm", type="min", value=1) + + my_computational_process.condition = [my_condition] + + ``` + + Returns + ------- + List[Condition] + list of condition for this computational process node + """ + return self._json_attrs.condition.copy() + + @condition.setter + @beartype + def condition(self, new_condition: List[Any]) -> None: + """ + set the condition for the computational process + + Parameters + ---------- + new_condition: List[Condition] + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, condition=new_condition) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def citation(self) -> List[Any]: + """ + List of citation for the computational process + + Examples + -------- + ```python + # create a reference node for the citation + my_reference = cript.Reference(type="journal_article", title="'Living' Polymers") + + # create a reference + my_citation = cript.Citation(type="derived_from", reference=my_reference) + + my_computational_process.citation = [my_citation] + ``` + + Returns + ------- + List[Citation] + list of citation for this computational process + """ + return self._json_attrs.citation.copy() + + @citation.setter + @beartype + def citation(self, new_citation_list: List[Any]) -> None: + """ + set the citation list for the computational process node + + Parameters + ---------- + new_citation_list: List[Citation] + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, citation=new_citation_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def property(self) -> List[Any]: + """ + List of properties + + Examples + -------- + ```python + # create a property node + my_property = cript.Property(key="modulus_shear", type="min", value=1.23, unit="gram") + + my_computational_process.property = [my_property] + ``` + + Returns + ------- + List[Property] + list of properties for this computational process node + """ + return self._json_attrs.property.copy() + + @property.setter + @beartype + def property(self, new_property_list: List[Any]) -> None: + """ + set the properties list for the computational process + + Parameters + ---------- + new_property_list: List[Property] + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, property=new_property_list) + self._update_json_attrs_if_valid(new_attrs) diff --git a/src/cript/nodes/primary_nodes/data.py b/src/cript/nodes/primary_nodes/data.py new file mode 100644 index 000000000..a6134d05c --- 
/dev/null +++ b/src/cript/nodes/primary_nodes/data.py @@ -0,0 +1,431 @@ +from dataclasses import dataclass, field, replace +from typing import Any, List, Optional, Union + +from beartype import beartype + +from cript.nodes.primary_nodes.primary_base_node import PrimaryBaseNode + + +class Data(PrimaryBaseNode): + """ + ## Definition + A [Data node](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=13) + node contains the meta-data to describe raw data that is beyond a single value, (i.e. n-dimensional data). + Each `Data` node must be linked to a single `Experiment` node. + + ## Available Sub-Objects + * [Citation](../../subobjects/citation) + + ## Attributes + | Attribute | Type | Example | Description | Required | + |---------------------|---------------------------------------------------|----------------------------|-----------------------------------------------------------------------------------------|----------| + | experiment | [Experiment](experiment.md) | | Experiment the data belongs to | True | + | name | str | `"my_data_name"` | Name of the data node | True | + | type | str | `"nmr_h1"` | Pick from [CRIPT data type controlled vocabulary](https://app.criptapp.org/keys/data-type/) | True | + | file | List[[File](../supporting_nodes/file.md)] | `[file_1, file_2, file_3]` | list of file nodes | False | + | sample_preparation | [Process](process.md) | | | False | + | computation | List[[Computation](computation.md)] | | data produced from this Computation method | False | + | computation_process | [Computational Process](./computation_process.md) | | data was produced from this computation process | False | + | material | List[[Material](./material.md)] | | materials with attributes associated with the data node | False | + | process | List[[Process](./process.md)] | | processes with attributes associated with the data node | False | + | citation | [Citation](../subobjects/citation.md) | | reference to a book, paper, or scholarly work | False | + + Example + -------- + ```python + # create file node + cript.File( + source="https://criptapp.org", + type="calibration", + extension=".csv", + data_dictionary="my file's data dictionary" + ) + + + # create data node with required arguments + my_data = cript.Data(name="my data name", type="afm_amp", file=[simple_file_node]) + ``` + + ## JSON Representation + ```json + { + "name":"my data name", + "node":["Data"], + "type":"afm_amp", + "uid":"_:80b02470-73d0-416e-8d93-12fdf69e481a", + "uuid":"80b02470-73d0-416e-8d93-12fdf69e481a" + "file":[ + { + "node":["File"], + "name":"my file node name", + "uid":"_:535779ea-0d1f-4b23-b3e8-60052f717307", + "uuid":"535779ea-0d1f-4b23-b3e8-60052f717307" + "type":"calibration", + "source":"https://criptapp.org", + "extension":".csv", + "data_dictionary":"my file's data dictionary", + } + ] + } + ``` + """ + + @dataclass(frozen=True) + class JsonAttributes(PrimaryBaseNode.JsonAttributes): + """ + all Data attributes + """ + + type: str = "" + # TODO add proper typing in future, using Any for now to avoid circular import error + file: List[Any] = field(default_factory=list) + sample_preparation: Any = field(default_factory=list) + computation: List[Any] = field(default_factory=list) + computation_process: Any = field(default_factory=list) + material: List[Any] = field(default_factory=list) + process: List[Any] = field(default_factory=list) + citation: List[Any] = field(default_factory=list) + + _json_attrs: JsonAttributes = JsonAttributes() + + @beartype + def 
__init__( + self, + name: str, + type: str, + file: List[Any], + sample_preparation: Any = None, + computation: Optional[List[Any]] = None, + computation_process: Optional[Any] = None, + material: Optional[List[Any]] = None, + process: Optional[List[Any]] = None, + citation: Optional[List[Any]] = None, + notes: str = "", + **kwargs + ): + super().__init__(name=name, notes=notes, **kwargs) + + if file is None: + file = [] + + if sample_preparation is None: + sample_preparation = [] + + if computation is None: + computation = [] + + if computation_process is None: + computation_process = [] + + if material is None: + material = [] + + if process is None: + process = [] + + if citation is None: + citation = [] + + self._json_attrs = replace( + self._json_attrs, + type=type, + file=file, + sample_preparation=sample_preparation, + computation=computation, + computation_process=computation_process, + material=material, + process=process, + citation=citation, + ) + + self.validate() + + @property + @beartype + def type(self) -> str: + """ + The data type must come from [CRIPT data type vocabulary](https://app.criptapp.org/vocab/data_type) + + Example + ------- + ```python + data.type = "afm_height" + ``` + + Returns + ------- + data type: str + data type for the data node must come from CRIPT controlled vocabulary + """ + return self._json_attrs.type + + @type.setter + @beartype + def type(self, new_data_type: str) -> None: + """ + set the data type. + The data type must come from [CRIPT data type vocabulary]() + + Parameters + ---------- + new_data_type: str + new data type to replace the current data type + + Returns + ------- + None + """ + # TODO validate that the data type is valid from CRIPT controlled vocabulary + new_attrs = replace(self._json_attrs, type=new_data_type) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def file(self) -> List[Any]: + """ + get the list of files for this data node + + Examples + -------- + ```python + # create a list of file nodes + my_new_files = [ + # file with link source + cript.File( + source="https://pubs.acs.org/doi/10.1021/acscentsci.3c00011", + type="computation_config", + extension=".pdf", + data_dictionary="my second file data dictionary", + ), + ] + + data_node.file = my_new_files + ``` + + Returns + ------- + List[File] + list of files for this data node + """ + return self._json_attrs.file.copy() + + @file.setter + @beartype + def file(self, new_file_list: List[Any]) -> None: + """ + set the list of file for this data node + + Parameters + ---------- + new_files_list: List[File] + new list of file nodes to replace the current list + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, file=new_file_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def sample_preparation(self) -> Union[Any, None]: + """ + The sample preparation for this data node + + Returns + ------- + sample_preparation: Process + sample preparation for this data node + """ + return self._json_attrs.sample_preparation + + @sample_preparation.setter + @beartype + def sample_preparation(self, new_sample_preparation: Union[Any, None]) -> None: + """ + set sample_preparation + + Parameters + ---------- + new_sample_preparation: Process + new_sample_preparation to replace the current one for this node + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, sample_preparation=new_sample_preparation) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def 
computation(self) -> List[Any]: + """ + list of computation nodes for this material node + + Returns + ------- + None + list of computation nodes + """ + return self._json_attrs.computation.copy() + + @computation.setter + @beartype + def computation(self, new_computation_list: List[Any]) -> None: + """ + set list of computation for this data node + + Parameters + ---------- + new_computation_list: List[Computation] + new computation list to replace the current one + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, computation=new_computation_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def computation_process(self) -> Union[Any, None]: + """ + The computation_process for this data node + + Returns + ------- + ComputationalProcess + computational process node for this data node + """ + return self._json_attrs.computation_process + + @computation_process.setter + @beartype + def computation_process(self, new_computation_process: Union[Any, None]) -> None: + """ + set the computational process + + Parameters + ---------- + new_computation_process: ComputationalProcess + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, computation_process=new_computation_process) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def material(self) -> List[Any]: + """ + List of materials for this node + + Returns + ------- + List[Material] + list of material + """ + return self._json_attrs.material.copy() + + @material.setter + @beartype + def material(self, new_material_list: List[Any]) -> None: + """ + set the list of materials for this data node + + Parameters + ---------- + new_material_list: List[Material] + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, material=new_material_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def process(self) -> List[Any]: + """ + list of [Process nodes](./process.md) for this data node + + Notes + ----- + Please note that while the process attribute of the data node is currently set to `Any` + the software still expects a Process node in the data's process attribute + > It is currently set to `Any` to avoid the circular import error + + Returns + ------- + List[Process] + list of process for the data node + """ + return self._json_attrs.process.copy() + + @process.setter + @beartype + def process(self, new_process_list: List[Any]) -> None: + """ + set the list of process for this data node + + Parameters + ---------- + new_process_list: List[Process] + new list of Process + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, process=new_process_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def citation(self) -> List[Any]: + """ + List of [citation](../../subobjects/citation) within the data node + + Example + ------- + ```python + # create a reference node + my_reference = cript.Reference(type="journal_article", title="'Living' Polymers") + + # create a citation list to house all the reference nodes + my_citation = cript.Citation(type="derived_from", reference=my_reference) + + # add citations to data node + my_data.citation = my_citations + ``` + + Returns + ------- + List[Citation] + list of citations for this data node + """ + return self._json_attrs.citation.copy() + + @citation.setter + @beartype + def citation(self, new_citation_list: List[Any]) -> None: + """ + set the list of citation + + Parameters + ---------- + new_citation_list: List[Citation] + 
new list of citation to replace the current one + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, citation=new_citation_list) + self._update_json_attrs_if_valid(new_attrs) diff --git a/src/cript/nodes/primary_nodes/experiment.py b/src/cript/nodes/primary_nodes/experiment.py new file mode 100644 index 000000000..b010aa894 --- /dev/null +++ b/src/cript/nodes/primary_nodes/experiment.py @@ -0,0 +1,402 @@ +from dataclasses import dataclass, field, replace +from typing import Any, List, Optional + +from beartype import beartype + +from cript.nodes.primary_nodes.primary_base_node import PrimaryBaseNode + + +class Experiment(PrimaryBaseNode): + """ + ## Definition + An + [Experiment node](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=9) + is nested inside a [Collection](../collection) node. + + ## Attributes + + | attribute | type | description | required | + |---------------------|------------------------------|-----------------------------------------------------------|----------| + | collection | Collection | collection associated with the experiment | True | + | process | List[Process] | process nodes associated with this experiment | False | + | computations | List[Computation] | computation method nodes associated with this experiment | False | + | computation_process | List[Computational Process] | computation process nodes associated with this experiment | False | + | data | List[Data] | data nodes associated with this experiment | False | + | funding | List[str] | funding source for experiment | False | + | citation | List[Citation] | reference to a book, paper, or scholarly work | False | + + + ## Subobjects + An + [Experiment node](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=9) + can be thought as a folder/bucket that can hold: + + * [Process](../process) + * [Computations](../computation) + * [Computation_Process](../computation_process) + * [Data](../data) + * [Funding](./#cript.nodes.primary_nodes.experiment.Experiment.funding) + * [Citation](../../subobjects/citation) + + + Warnings + -------- + !!! 
warning "Experiment names" + Experiment names **MUST** be unique within a [Collection](../collection) + + --- + + ## JSON Representation + ```json + { + "name":"my experiment name", + "node":["Experiment"], + "uid":"_:886c4deb-2186-4f11-8134-a37111200b83", + "uuid":"886c4deb-2186-4f11-8134-a37111200b83" + } + ``` + + """ + + @dataclass(frozen=True) + class JsonAttributes(PrimaryBaseNode.JsonAttributes): + """ + all Collection attributes + """ + + process: List[Any] = field(default_factory=list) + computation: List[Any] = field(default_factory=list) + computation_process: List[Any] = field(default_factory=list) + data: List[Any] = field(default_factory=list) + funding: List[str] = field(default_factory=list) + citation: List[Any] = field(default_factory=list) + + _json_attrs: JsonAttributes = JsonAttributes() + + @beartype + def __init__( + self, + name: str, + process: Optional[List[Any]] = None, + computation: Optional[List[Any]] = None, + computation_process: Optional[List[Any]] = None, + data: Optional[List[Any]] = None, + funding: Optional[List[str]] = None, + citation: Optional[List[Any]] = None, + notes: str = "", + **kwargs + ): + """ + create an Experiment node + + Parameters + ---------- + name: str + name of Experiment + process: List[Process] + list of Process nodes for this Experiment + computation: List[Computation] + list of computation nodes for this Experiment + computation_process: List[ComputationalProcess] + list of computational_process nodes for this Experiment + data: List[Data] + list of data nodes for this experiment + funding: List[str] + list of the funders names for this Experiment + citation: List[Citation] + list of Citation nodes for this experiment + notes: str default="" + notes for the experiment node + + Examples + -------- + ```python + # create an experiment node with all possible arguments + my_experiment = cript.Experiment(name="my experiment name") + ``` + + Returns + ------- + None + Instantiate an Experiment node + """ + + if process is None: + process = [] + if computation is None: + computation = [] + if computation_process is None: + computation_process = [] + if data is None: + data = [] + if funding is None: + funding = [] + if citation is None: + citation = [] + + super().__init__(name=name, notes=notes, **kwargs) + + self._json_attrs = replace( + self._json_attrs, + name=name, + process=process, + computation=computation, + computation_process=computation_process, + data=data, + funding=funding, + citation=citation, + notes=notes, + ) + + # check if the code is still valid + self.validate() + + @property + @beartype + def process(self) -> List[Any]: + """ + List of process for experiment + + ```python + # create a simple process node + my_process = cript.Process(name="my process name", type="affinity_pure") + + my_experiment.process = [my_process] + ``` + + Returns + ------- + List[Process] + List of process that were performed in this experiment + """ + return self._json_attrs.process.copy() + + @process.setter + @beartype + def process(self, new_process_list: List[Any]) -> None: + """ + set the list of process for this experiment + + Parameters + ---------- + new_process_list: List[Process] + new process list to replace the current process list + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, process=new_process_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def computation(self) -> List[Any]: + """ + List of the [computations](../computation) in this experiment + + Examples + 
-------- + ```python + # create computation node + my_computation = cript.Computation(name="my computation name", type="analysis") + + # add computation node to experiment node + my_experiment.computation = [my_computation] + ``` + + Returns + ------- + List[Computation] + List of [computations](../computation) for this experiment + """ + return self._json_attrs.computation.copy() + + @computation.setter + @beartype + def computation(self, new_computation_list: List[Any]) -> None: + """ + set the list of computations for this experiment + + Parameters + ---------- + new_computation_list: List[Computation] + new list of computations to replace the current list for this experiment + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, computation=new_computation_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def computation_process(self) -> List[Any]: + """ + List of [computation_process](../computation_process) for this experiment + + Examples + -------- + ```python + my_computation_process = cript.ComputationalProcess( + name="my computational process name", + type="cross_linking", # must come from CRIPT Controlled Vocabulary + input_data=[input_data], # input data is another data node + ingredients=[ingredients], # list of ingredient sub-objects + ) + + # add computation_process node to experiment node + my_experiment.computation_process = [my_computation_process] + ``` + + Returns + ------- + List[ComputationalProcess] + computational processes that were performed in this experiment + """ + return self._json_attrs.computation_process.copy() + + @computation_process.setter + @beartype + def computation_process(self, new_computation_process_list: List[Any]) -> None: + """ + set the list of computation_process for this experiment + + Parameters + ---------- + new_computation_process_list: List[ComputationalProcess] + new list of computation_process to replace the current one for this experiment + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, computation_process=new_computation_process_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def data(self) -> List[Any]: + """ + List of [data nodes](../data) for this experiment + + Examples + -------- + ```python + # create a simple file node + my_file = cript.File( + source="https://criptapp.org", + type="calibration", + extension=".csv", + data_dictionary="my file's data dictionary", + ) + + # create a simple data node + my_data = cript.Data(name="my data name", type="afm_amp", file=[my_file]) + + my_experiment.data = [my_data] + ``` + + Returns + ------- + List[Data] + list of [data nodes](../data) that belong to this experiment + """ + return self._json_attrs.data.copy() + + @data.setter + @beartype + def data(self, new_data_list: List[Any]) -> None: + """ + set the list of data for this experiment + + Parameters + ---------- + new_data_list: List[Data] + new list of data to replace the current list for this experiment + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, data=new_data_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def funding(self) -> List[str]: + """ + List of strings of all the funders for this experiment + + Examples + -------- + ```python + my_experiment.funding = ["National Science Foundation", "IRIS", "NIST"] + ``` + + Returns + ------- + List[str] + List of funders for this experiment + """ + return self._json_attrs.funding.copy() + + @funding.setter + 
@beartype + def funding(self, new_funding_list: List[str]) -> None: + """ + set the list of funders for this experiment + + Parameters + ---------- + new_funding_list: List[str] + replace the current list of funders + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, funding=new_funding_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def citation(self) -> List[Any]: + """ + List of [citation](../../subobjects/citation) for this experiment + + Examples + -------- + ```python + # create citation node + my_citation = cript.Citation(type="derived_from", reference=simple_reference_node) + + # add citation to experiment + my_experiment.citations = [my_citation] + ``` + + Returns + ------- + List[Citation] + list of citations of scholarly work that was used in this experiment + """ + return self._json_attrs.citation.copy() + + @citation.setter + @beartype + def citation(self, new_citation_list: List[Any]) -> None: + """ + set the list of citations for this experiment + + Parameters + ---------- + new_citations_list: List[Citation] + replace the list of citations for this experiment with a new list of citations + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, citation=new_citation_list) + self._update_json_attrs_if_valid(new_attrs) diff --git a/src/cript/nodes/primary_nodes/inventory.py b/src/cript/nodes/primary_nodes/inventory.py new file mode 100644 index 000000000..d35b56207 --- /dev/null +++ b/src/cript/nodes/primary_nodes/inventory.py @@ -0,0 +1,148 @@ +from dataclasses import dataclass, field, replace +from typing import List + +from beartype import beartype + +from cript.nodes.primary_nodes.material import Material +from cript.nodes.primary_nodes.primary_base_node import PrimaryBaseNode + + +class Inventory(PrimaryBaseNode): + """ + ## Definition + An + [Inventory Node](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=9) + is a list of material nodes. + An example of an inventory can be a grouping of materials that were extracted from literature + and curated into a group for machine learning, or it can be a subset of chemicals that are used for a + certain type of synthesis. 
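+
+    As a quick sketch (the material names below are placeholders, not fixtures that ship with the SDK), an inventory is built by handing an existing list of material nodes to the constructor:
+
+    ```python
+    # assumes `import cript` and that these materials already belong to the project
+    styrene = cript.Material(
+        name="styrene",
+        identifiers=[{"alternative_names": "vinylbenzene"}],
+    )
+    toluene = cript.Material(
+        name="toluene",
+        identifiers=[{"alternative_names": "methylbenzene"}],
+    )
+
+    my_synthesis_inventory = cript.Inventory(
+        name="chemicals for styrene polymerization",
+        material=[styrene, toluene],
+    )
+    ```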
+ + ## Attributes + + | Attribute | Type | Example | Description | + |------------|---------------------------------|---------------------|-------------------------------------------| + | material | list[[Material](./material.md)] | | material that you like to group together | + + + ## JSON Representation + ```json + { + "name":"my inventory name", + "node":["Inventory"], + "uid":"_:90f45778-b7c9-4b77-8b83-a6ea9671a937", + "uuid":"90f45778-b7c9-4b77-8b83-a6ea9671a937", + "material":[ + { + "node":["Material"], + "name":"my material 1", + "uid":"_:9679ff12-f9b4-41f4-be95-080b78fa71fd", + "uuid":"9679ff12-f9b4-41f4-be95-080b78fa71fd" + "bigsmiles":"[H]{[>][<]C(C[>])c1ccccc1[]}", + }, + { + "node":["Material"], + "name":"my material 2", + "uid":"_:1ee41708-3531-43eb-8049-4bb91ad73df6", + "uuid":"1ee41708-3531-43eb-8049-4bb91ad73df6" + "bigsmiles":"654321", + } + ] + } + ``` + + + """ + + @dataclass(frozen=True) + class JsonAttributes(PrimaryBaseNode.JsonAttributes): + """ + all Inventory attributes + """ + + material: List[Material] = field(default_factory=list) + + _json_attrs: JsonAttributes = JsonAttributes() + + @beartype + def __init__(self, name: str, material: List[Material], notes: str = "", **kwargs) -> None: + """ + Instantiate an inventory node + + Examples + -------- + ```python + material_1 = cript.Material( + name="material 1", + identifiers=[{"alternative_names": "material 1 alternative name"}], + ) + + material_2 = cript.Material( + name="material 2", + identifiers=[{"alternative_names": "material 2 alternative name"}], + ) + + # instantiate inventory node + my_inventory = cript.Inventory( + name="my inventory name", material=[material_1, material_2] + ) + ``` + + Parameters + ---------- + material: List[Material] + list of materials in this inventory + + Returns + ------- + None + instantiate an inventory node + """ + + if material is None: + material = [] + + super().__init__(name=name, notes=notes, **kwargs) + + self._json_attrs = replace(self._json_attrs, material=material) + + @property + @beartype + def material(self) -> List[Material]: + """ + List of [material](../material) in this inventory + + Examples + -------- + ```python + material_3 = cript.Material( + name="new material 3", + identifiers=[{"alternative_names": "new material 3 alternative name"}], + ) + + my_inventory.material = [my_material_3] + ``` + + Returns + ------- + List[Material] + list of material representing the inventory within the collection + """ + return self._json_attrs.material.copy() + + @material.setter + @beartype + def material(self, new_material_list: List[Material]): + """ + set the list of material for this inventory node + + Parameters + ---------- + new_material_list: List[Material] + new list of material to replace the current list of material nodes for this inventory node + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, material=new_material_list) + self._update_json_attrs_if_valid(new_attrs) diff --git a/src/cript/nodes/primary_nodes/material.py b/src/cript/nodes/primary_nodes/material.py new file mode 100644 index 000000000..fae97926c --- /dev/null +++ b/src/cript/nodes/primary_nodes/material.py @@ -0,0 +1,445 @@ +from dataclasses import dataclass, field, replace +from typing import Any, Dict, List, Optional + +from beartype import beartype + +from cript.nodes.primary_nodes.primary_base_node import PrimaryBaseNode +from cript.nodes.primary_nodes.process import Process + + +class Material(PrimaryBaseNode): + """ + ## Definition + A [Material 
node](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=10) + is a collection of the identifiers and properties of a chemical, mixture, or substance. + + ## Attributes + | attribute | type | example | description | required | vocab | + |---------------------------|----------------------------------------------------------------------|---------------------------------------------------|----------------------------------------------|-------------|-------| + | identifiers | list[Identifier] | | material identifiers | True | | + | component | list[[Material](./)] | | list of component that make up the mixture | | | + | property | list[[Property](../../subobjects/property)] | | material properties | | | + | process | [Process](../process) | | process node that made this material | | | + | parent_material | [Material](./) | | material node that this node was copied from | | | + | computational_ forcefield | [Computation Forcefield](../../subobjects/computational_forcefield) | | computation forcefield | Conditional | | + | keyword | list[str] | [thermoplastic, homopolymer, linear, polyolefins] | words that classify the material | | True | + + ## Navigating to Material + Materials can be easily found on the [CRIPT](https://app.criptapp.org) home screen in the + under the navigation within the [Materials link](https://app.criptapp.org/material/) + + ## Available Sub-Objects for Material + * [Identifier](../../subobjects/identifier) + * [Property](../../subobjects/property) + * [Computational_forcefield](../../subobjects/computational_forcefield) + + Example + ------- + water, brine (water + NaCl), polystyrene, polyethylene glycol hydrogels, vulcanized polyisoprene, mcherry (protein), and mica + + + Warnings + ------- + !!! 
warning "Material names" + Material names Must be unique within a [Project](../project) + + ```json + { + "node":["Material"], + "name":"my unique material name", + "uid":"_:9679ff12-f9b4-41f4-be95-080b78fa71fd", + "uuid":"9679ff12-f9b4-41f4-be95-080b78fa71fd" + "bigsmiles":"[H]{[>][<]C(C[>])c1ccccc1[]}", + } + ``` + """ + + @dataclass(frozen=True) + class JsonAttributes(PrimaryBaseNode.JsonAttributes): + """ + all Material attributes + """ + + # identifier sub-object for the material + identifiers: List[Dict[str, str]] = field(default_factory=dict) # type: ignore + # TODO add proper typing in future, using Any for now to avoid circular import error + component: List["Material"] = field(default_factory=list) + process: Optional[Process] = None + property: List[Any] = field(default_factory=list) + parent_material: Optional["Material"] = None + computational_forcefield: Optional[Any] = None + keyword: List[str] = field(default_factory=list) + + _json_attrs: JsonAttributes = JsonAttributes() + + @beartype + def __init__( + self, + name: str, + identifiers: List[Dict[str, str]], + component: Optional[List["Material"]] = None, + process: Optional[Process] = None, + property: Optional[List[Any]] = None, + parent_material: Optional["Material"] = None, + computational_forcefield: Optional[Any] = None, + keyword: Optional[List[str]] = None, + notes: str = "", + **kwargs + ): + """ + create a material node + + Parameters + ---------- + name: str + identifiers: List[Dict[str, str]] + component: List["Material"], default=None + property: Optional[Process], default=None + process: List[Process], default=None + parent_material: "Material", default=None + computational_forcefield: ComputationalForcefield, default=None + keyword: List[str], default=None + + Returns + ------- + None + Instantiate a material node + """ + + super().__init__(name=name, notes=notes, **kwargs) + + if component is None: + component = [] + + if property is None: + property = [] + + if keyword is None: + keyword = [] + + self._json_attrs = replace( + self._json_attrs, + name=name, + identifiers=identifiers, + component=component, + process=process, + property=property, + parent_material=parent_material, + computational_forcefield=computational_forcefield, + keyword=keyword, + ) + + @property + @beartype + def name(self) -> str: + """ + material name + + Examples + ```python + my_material.name = "my new material" + ``` + + Returns + ------- + str + material name + """ + return self._json_attrs.name + + @name.setter + @beartype + def name(self, new_name: str) -> None: + """ + set the name of the material + + Parameters + ---------- + new_name: str + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, name=new_name) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def identifiers(self) -> List[Dict[str, str]]: + """ + get the identifiers for this material + + ```python + my_material.identifier = {"alternative_names": "my material alternative name"} + ``` + + [material identifier key](https://app.criptapp.org/vocab/material_identifier_key) + must come from CRIPT controlled vocabulary + + Returns + ------- + List[Dict[str, str]] + list of dictionary that has identifiers for this material + """ + return self._json_attrs.identifiers.copy() + + @identifiers.setter + @beartype + def identifiers(self, new_identifiers_list: List[Dict[str, str]]) -> None: + """ + set the list of identifiers for this material + + the identifier keys must come from the + material identifiers keyword within the CRIPT 
controlled vocabulary + + Parameters + ---------- + new_identifiers_list: List[Dict[str, str]] + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, identifiers=new_identifiers_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def component(self) -> List["Material"]: + """ + list of components ([material nodes](./)) that make up this material + + Examples + -------- + ```python + # material component + my_component = [ + # create material node + cript.Material( + name="my component material 1", + identifiers=[{"alternative_names": "component 1 alternative name"}], + ), + + # create material node + cript.Material( + name="my component material 2", + identifiers=[{"alternative_names": "component 2 alternative name"}], + ), + ] + + + identifiers = [{"alternative_names": "my material alternative name"}] + my_material = cript.Material(name="my material", component=my_component, identifiers=identifiers) + ``` + + Returns + ------- + List[Material] + list of component that make up this material + """ + return self._json_attrs.component + + @component.setter + @beartype + def component(self, new_component_list: List["Material"]) -> None: + """ + set the list of component (material nodes) that make up this material + + Parameters + ---------- + new_component_list: List["Material"] + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, component=new_component_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def parent_material(self) -> Optional["Material"]: + """ + List of parent materials + + Returns + ------- + List["Material"] + list of parent materials + """ + return self._json_attrs.parent_material + + @parent_material.setter + @beartype + def parent_material(self, new_parent_material: "Material") -> None: + """ + set the [parent materials](./) for this material + + Parameters + ---------- + new_parent_material: "Material" + + Returns + ------- + None + """ + + new_attrs = replace(self._json_attrs, parent_material=new_parent_material) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def computational_forcefield(self) -> Any: + """ + list of [computational_forcefield](../../subobjects/computational_forcefield) for this material node + + Returns + ------- + List[ComputationForcefield] + list of computational_forcefield that created this material + """ + return self._json_attrs.computational_forcefield + + @computational_forcefield.setter + @beartype + def computational_forcefield(self, new_computational_forcefield_list: Any) -> None: + """ + sets the list of computational forcefields for this material + + Parameters + ---------- + new_computation_forcefield_list: List[ComputationalForcefield] + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, computational_forcefield=new_computational_forcefield_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def keyword(self) -> List[str]: + """ + List of keyword for this material + + the material keyword must come from the + [CRIPT controlled vocabulary](https://app.criptapp.org/vocab/material_keyword) + + ```python + identifiers = [{"alternative_names": "my material alternative name"}] + + # keyword + material_keyword = ["acetylene", "acrylate", "alternating"] + + my_material = cript.Material( + name="my material", keyword=material_keyword, identifiers=identifiers + ) + ``` + + Returns + ------- + List[str] + list of material keyword + """ + return self._json_attrs.keyword + + 
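+
+    # A minimal sketch (hypothetical node names, both nodes assumed to already exist)
+    # of linking the `parent_material` and `computational_forcefield` attributes above,
+    # since neither has a docstring example:
+    #
+    #     my_copied_material = cript.Material(
+    #         name="my copied material",
+    #         identifiers=[{"alternative_names": "copy of my material"}],
+    #     )
+    #     my_copied_material.parent_material = my_material
+    #     my_copied_material.computational_forcefield = my_computational_forcefield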
@keyword.setter + @beartype + def keyword(self, new_keyword_list: List[str]) -> None: + """ + set the keyword for this material + + the material keyword must come from the CRIPT controlled vocabulary + + Parameters + ---------- + new_keyword_list + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, keyword=new_keyword_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def process(self) -> Optional[Process]: + return self._json_attrs.process # type: ignore + + @process.setter + def process(self, new_process: Process) -> None: + new_attrs = replace(self._json_attrs, process=new_process) + self._update_json_attrs_if_valid(new_attrs) + + @property + def property(self) -> List[Any]: + """ + list of material [property](../../subobjects/property) + + ```python + # property subobject + my_property = cript.Property(key="modulus_shear", type="min", value=1.23, unit="gram") + + my_material.property = my_property + ``` + + Returns + ------- + List[Property] + list of property that define this material + """ + return self._json_attrs.property.copy() + + @property.setter + @beartype + def property(self, new_property_list: List[Any]) -> None: + """ + set the list of properties for this material + + Parameters + ---------- + new_property_list: List[Property] + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, property=new_property_list) + self._update_json_attrs_if_valid(new_attrs) + + @classmethod + @beartype + def _from_json(cls, json_dict: Dict): + """ + Create a new instance of a node from a JSON representation. + + Parameters + ---------- + json_dict : Dict + A JSON dictionary representing a node + + Returns + ------- + node + A new instance of a node. + + Notes + ----- + required fields in JSON: + * `name`: The name of the node + + optional fields in JSON: + * `identifiers`: A list of material identifiers. + * If the `identifiers` property is not present in the JSON dictionary, + it will be set to an empty list. + """ + from cript.nodes.util.material_deserialization import ( + _deserialize_flattened_material_identifiers, + ) + + json_dict = _deserialize_flattened_material_identifiers(json_dict) + + return super()._from_json(json_dict) diff --git a/src/cript/nodes/primary_nodes/primary_base_node.py b/src/cript/nodes/primary_nodes/primary_base_node.py new file mode 100644 index 000000000..7b4dc86c6 --- /dev/null +++ b/src/cript/nodes/primary_nodes/primary_base_node.py @@ -0,0 +1,119 @@ +from abc import ABC +from dataclasses import dataclass, replace + +from beartype import beartype + +from cript.nodes.uuid_base import UUIDBaseNode + + +class PrimaryBaseNode(UUIDBaseNode, ABC): + """ + Abstract class that defines what it means to be a PrimaryNode, + and other primary nodes can inherit from. + """ + + @dataclass(frozen=True) + class JsonAttributes(UUIDBaseNode.JsonAttributes): + """ + All shared attributes between all Primary nodes and set to their default values + """ + + locked: bool = False + model_version: str = "" + public: bool = False + name: str = "" + notes: str = "" + + _json_attrs: JsonAttributes = JsonAttributes() + + @beartype + def __init__(self, name: str, notes: str, **kwargs): + # initialize Base class with node + super().__init__(**kwargs) + # replace name and notes within PrimaryBase + self._json_attrs = replace(self._json_attrs, name=name, notes=notes) + + @beartype + def __str__(self) -> str: + """ + Return a string representation of a primary node dataclass attributes. 
+ Every node that inherits from this class should overwrite it to best fit + their use case, but this provides a nice default value just in case + + Examples + -------- + { + 'locked': False, + 'model_version': '', + 'public': False, + 'notes': '' + } + + + Returns + ------- + str + A string representation of the primary node common attributes. + """ + return super().__str__() + + @property + @beartype + def locked(self): + return self._json_attrs.locked + + @property + @beartype + def model_version(self): + return self._json_attrs.model_version + + @property + @beartype + def updated_by(self): + return self._json_attrs.updated_by + + @property + @beartype + def created_by(self): + return self._json_attrs.created_by + + @property + @beartype + def public(self): + return self._json_attrs.public + + @property + @beartype + def name(self): + return self._json_attrs.name + + @name.setter + @beartype + def name(self, new_name: str) -> None: + """ + set the PrimaryBaseNode name + + Parameters + ---------- + new_name: str + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, name=new_name) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def notes(self): + return self._json_attrs.notes + + @notes.setter + @beartype + def notes(self, new_notes: str) -> None: + """ + allow every node that inherits base attributes to set its notes + """ + new_attrs = replace(self._json_attrs, notes=new_notes) + self._update_json_attrs_if_valid(new_attrs) diff --git a/src/cript/nodes/primary_nodes/process.py b/src/cript/nodes/primary_nodes/process.py new file mode 100644 index 000000000..67ba1667f --- /dev/null +++ b/src/cript/nodes/primary_nodes/process.py @@ -0,0 +1,590 @@ +from dataclasses import dataclass, field, replace +from typing import Any, List, Optional + +from beartype import beartype + +from cript.nodes.primary_nodes.primary_base_node import PrimaryBaseNode + + +class Process(PrimaryBaseNode): + """ + ## Definition + The process node contains a list of ingredients, quantities, and procedure information for an experimental material + transformation (chemical and physical). + + ## Attributes + + | attribute | type | example | description | required | vocab | + |-------------------------|------------------|---------------------------------------------------------------------------------|---------------------------------------------------------------------|----------|-------| + | type | str | mix | type of process | True | True | + | ingredient | list[Ingredient] | | ingredients | | | + | description | str | To oven-dried 20 mL glass vial, 5 mL of styrene and 10 ml of toluene was added. 
| explanation of the process | | | + | equipment | list[Equipment] | | equipment used in the process | | | + | product | list[Material] | | desired material produced from the process | | | + | waste | list[Material] | | material sent to waste | | | + | prerequisite_ processes | list[Process] | | processes that must be completed prior to the start of this process | | | + | condition | list[Condition] | | global process condition | | | + | property | list[Property] | | process properties | | | + | keyword | list[str] | | words that classify the process | | True | + | citation | list[Citation] | | reference to a book, paper, or scholarly work | | | + + ## Can be added to + * [Experiment](../experiment) + + ## Available Subobjects + * [Ingredient](../../subobjects/ingredient) + * [Equipment](../../subobjects/equipment) + * [Property](../../subobjects/property) + * [Condition](../../subobjects/condition) + * [Citation](../../subobjects/citation) + + ## JSON Representation + ```json + { + "name":"my minimal process name", + "node":["Process"], + "type":"affinity_pure", + "keyword":[], + "uid":"_:f8ef33f3-677a-40f3-b24e-65ab2c99d796", + "uuid":"f8ef33f3-677a-40f3-b24e-65ab2c99d796" + } + ``` + + """ + + @dataclass(frozen=True) + class JsonAttributes(PrimaryBaseNode.JsonAttributes): + """ + all Process attributes + """ + + type: str = "" + # TODO add proper typing in future, using Any for now to avoid circular import error + ingredient: List[Any] = field(default_factory=list) + description: str = "" + equipment: List[Any] = field(default_factory=list) + product: List[Any] = field(default_factory=list) + waste: List[Any] = field(default_factory=list) + prerequisite_process: List["Process"] = field(default_factory=list) + condition: List[Any] = field(default_factory=list) + property: List[Any] = field(default_factory=list) + keyword: List[str] = field(default_factory=list) + citation: List[Any] = field(default_factory=list) + + _json_attrs: JsonAttributes = JsonAttributes() + + @beartype + def __init__( + self, + name: str, + type: str, + ingredient: Optional[List[Any]] = None, + description: str = "", + equipment: Optional[List[Any]] = None, + product: Optional[List[Any]] = None, + waste: Optional[List[Any]] = None, + prerequisite_process: Optional[List[Any]] = None, + condition: Optional[List[Any]] = None, + property: Optional[List[Any]] = None, + keyword: Optional[List[str]] = None, + citation: Optional[List[Any]] = None, + notes: str = "", + **kwargs + ) -> None: + """ + create a process node + + ```python + my_process = cript.Process(name="my process name", type="affinity_pure") + ``` + + Parameters + ---------- + ingredient: List[Ingredient] + [ingredient](../../subobjects/ingredient) used in this process + type: str = "" + Process type must come from + [CRIPT Controlled vocabulary process type](https://app.criptapp.org/vocab/process-type/) + description: str = "" + description of this process + equipment: List[Equipment] = None + list of [equipment](../../subobjects/equipment) used in this process + product: List[Material] = None + product that this process created + waste: List[Material] = None + waste that this process created + condition: List[Condition] = None + list of [condition](../../subobjects/condition) that this process was created under + property: List[Property] = None + list of [properties](../../subobjects/property) for this process + keyword: List[str] = None + list of keywords for this process must come from + [CRIPT process keyword controlled 
keyword](https://app.criptapp.org/vocab/process-keyword/) + citation: List[Citation] = None + list of [citation](../../subobjects/citation) + + Returns + ------- + None + instantiate a process node + """ + + if ingredient is None: + ingredient = [] + + if equipment is None: + equipment = [] + + if product is None: + product = [] + + if waste is None: + waste = [] + + if prerequisite_process is None: + prerequisite_process = [] + + if condition is None: + condition = [] + + if property is None: + property = [] + + if keyword is None: + keyword = [] + + if citation is None: + citation = [] + + super().__init__(name=name, notes=notes, **kwargs) + + new_attrs = replace( + self._json_attrs, + ingredient=ingredient, + type=type, + description=description, + equipment=equipment, + product=product, + waste=waste, + condition=condition, + prerequisite_process=prerequisite_process, + property=property, + keyword=keyword, + citation=citation, + ) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def type(self) -> str: + """ + [Process type](https://app.criptapp.org/vocab/process_type) must come from the CRIPT controlled vocabulary + + Examples + -------- + ```python + my_process.type = "affinity_pure" + ``` + + Returns + ------- + str + Select a [Process type](https://app.criptapp.org/vocab/process-type/) from CRIPT controlled vocabulary + """ + return self._json_attrs.type + + @type.setter + @beartype + def type(self, new_process_type: str) -> None: + """ + set process type from CRIPT controlled vocabulary + + Parameters + ---------- + new_process_type: str + new process type from CRIPT controlled vocabulary + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, type=new_process_type) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def ingredient(self) -> List[Any]: + """ + List of [ingredient](../../subobjects/ingredient) for this process + + Examples + --------- + ```python + my_ingredients = cript.Ingredient( + material=simple_material_node, + quantities=[simple_quantity_node], + ) + + my_process.ingredient = [my_ingredients] + ``` + + Returns + ------- + List[Ingredient] + list of ingredients for this process + """ + return self._json_attrs.ingredient.copy() + + @ingredient.setter + @beartype + def ingredient(self, new_ingredient_list: List[Any]) -> None: + """ + set the list of the ingredients for this process + + Parameters + ---------- + new_ingredient_list + list of ingredients to replace the current list + + Returns + ------- + None + """ + # TODO need to validate with CRIPT controlled vocabulary + # and if invalid then raise an error immediately + new_attrs = replace(self._json_attrs, ingredient=new_ingredient_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def description(self) -> str: + """ + description of this process + + Examples + -------- + ```python + my_process.description = "To oven-dried 20 mL glass vial, 5 mL of styrene and 10 ml of toluene was added" + ``` + + Returns + ------- + str + description of this process + """ + return self._json_attrs.description + + @description.setter + @beartype + def description(self, new_description: str) -> None: + """ + set the description of this process + + Parameters + ---------- + new_description: str + new process description to replace the current one + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, description=new_description) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def 
equipment(self) -> List[Any]: + """ + List of [equipment](../../subobjects/equipment) used for this process + + Returns + ------- + List[Equipment] + list of equipment used for this process + """ + return self._json_attrs.equipment.copy() + + @equipment.setter + @beartype + def equipment(self, new_equipment_list: List[Any]) -> None: + """ + set the list of equipment used for this process + + Parameters + ---------- + new_equipment_list + new equipment list to replace the current equipment list for this process + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, equipment=new_equipment_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def product(self) -> List[Any]: + """ + List of product (material nodes) for this process + + Returns + ------- + List[Material] + List of process product (Material nodes) + """ + return self._json_attrs.product.copy() + + @product.setter + @beartype + def product(self, new_product_list: List[Any]) -> None: + """ + set the product list for this process + + Parameters + ---------- + new_product_list: List[Material] + replace the current list of process product + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, product=new_product_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def waste(self) -> List[Any]: + """ + List of waste that resulted from this process + + Examples + -------- + ```python + my_process.waste = my_waste_material + ``` + + Returns + ------- + List[Material] + list of waste materials that resulted from this product + """ + return self._json_attrs.waste.copy() + + @waste.setter + @beartype + def waste(self, new_waste_list: List[Any]) -> None: + """ + set the list of waste (Material node) for that resulted from this process + + Parameters + ---------- + new_waste_list: List[Material] + replace the list waste that resulted from this process + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, waste=new_waste_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def prerequisite_process(self) -> List["Process"]: + """ + list of prerequisite process nodes + + Examples + -------- + ```python + + my_prerequisite_process = [ + cript.Process(name="prerequisite processes 1", type="blow_molding"), + cript.Process(name="prerequisite processes 2", type="centrifugation"), + ] + + my_process.prerequisite_process = my_prerequisite_process + ``` + + Returns + ------- + List[Process] + list of process that had to happen before this process + """ + return self._json_attrs.prerequisite_process.copy() + + @prerequisite_process.setter + @beartype + def prerequisite_process(self, new_prerequisite_process_list: List["Process"]) -> None: + """ + set the prerequisite_process for the process node + + Parameters + ---------- + new_prerequisite_process_list: List["Process"] + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, prerequisite_process=new_prerequisite_process_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def condition(self) -> List[Any]: + """ + List of condition present for this process + + Examples + ------- + ```python + # create condition node + my_condition = cript.Condition(key="atm", type="min", value=1) + + my_process.condition = [my_condition] + ``` + + Returns + ------- + List[Condition] + list of condition for this process node + """ + return self._json_attrs.condition.copy() + + @condition.setter + @beartype + def condition(self, 
new_condition_list: List[Any]) -> None: + """ + set the list of condition for this process + + Parameters + ---------- + new_condition_list: List[Condition] + replace the condition list + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, condition=new_condition_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def keyword(self) -> List[str]: + """ + List of keyword for this process + + [Process keyword](https://app.criptapp.org/vocab/process-keyword/) must come from CRIPT controlled vocabulary + + Returns + ------- + List[str] + list of keywords for this process nod + """ + return self._json_attrs.keyword.copy() # type: ignore + + @keyword.setter + @beartype + def keyword(self, new_keyword_list: List[str]) -> None: + """ + set the list of keyword for this process from CRIPT controlled vocabulary + + Parameters + ---------- + new_keyword_list: List[str] + replace the current list of keyword + + Returns + ------- + None + """ + # TODO validate with CRIPT controlled vocabulary + new_attrs = replace(self._json_attrs, keyword=new_keyword_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def citation(self) -> List[Any]: + """ + List of citation for this process + + Examples + -------- + ```python + # crate reference node for this citation + my_reference = cript.Reference(type="journal_article", title="'Living' Polymers") + + # create citation node + my_citation = cript.Citation(type="derived_from", reference=my_reference) + + my_process.citation = [my_citation] + ``` + + Returns + ------- + List[Citation] + list of citation for this process node + """ + return self._json_attrs.citation.copy() + + @citation.setter + @beartype + def citation(self, new_citation_list: List[Any]) -> None: + """ + set the list of citation for this process + + Parameters + ---------- + new_citation_list: List[Citation] + replace the current list of citation + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, citation=new_citation_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def property(self) -> List[Any]: + """ + List of [Property nodes](../../subobjects/property) for this process + + Examples + -------- + ```python + # create property node + my_property = cript.Property(key="modulus_shear", type="min", value=1.23, unit="gram") + + my_process.properties = [my_property] + ``` + + Returns + ------- + List[Property] + list of properties for this process + """ + return self._json_attrs.property.copy() + + @property.setter + @beartype + def property(self, new_property_list: List[Any]) -> None: + """ + set the list of Property nodes for this process + + Parameters + ---------- + new_property_list: List[Property] + replace the current list of properties + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, property=new_property_list) + self._update_json_attrs_if_valid(new_attrs) diff --git a/src/cript/nodes/primary_nodes/project.py b/src/cript/nodes/primary_nodes/project.py new file mode 100644 index 000000000..06805251f --- /dev/null +++ b/src/cript/nodes/primary_nodes/project.py @@ -0,0 +1,232 @@ +from dataclasses import dataclass, field, replace +from typing import List, Optional + +from beartype import beartype + +from cript.nodes.primary_nodes.collection import Collection +from cript.nodes.primary_nodes.material import Material +from cript.nodes.primary_nodes.primary_base_node import PrimaryBaseNode +from cript.nodes.supporting_nodes import User + + +class 
Project(PrimaryBaseNode): + """ + ## Definition + A [Project](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=7) + is the highest level node that is Not nested inside any other node. + A Project can be thought of as a folder that can contain [Collections](../collection) and + [Materials](../material). + + + | attribute | type | description | + |-------------|------------------|----------------------------------------| + | collection | List[Collection] | collections that relate to the project | + | materials | List[Materials] | materials owned by the project | + + ## JSON Representation + ```json + { + "name":"my project name", + "node":["Project"], + "uid":"_:270168b7-fc29-4c37-aa93-334212e1d962", + "uuid":"270168b7-fc29-4c37-aa93-334212e1d962", + "collection":[ + { + "name":"my collection name", + "node":["Collection"], + "uid":"_:c60955a5-4de0-4da5-b2c8-77952b1d9bfa", + "uuid":"c60955a5-4de0-4da5-b2c8-77952b1d9bfa", + "experiment":[ + { + "name":"my experiment name", + "node":["Experiment"], + "uid":"_:a8cbc083-506e-45ce-bb8f-5e50917ab361", + "uuid":"a8cbc083-506e-45ce-bb8f-5e50917ab361" + } + ], + "inventory":[], + "citation":[] + } + ] + } + ``` + """ + + @dataclass(frozen=True) + class JsonAttributes(PrimaryBaseNode.JsonAttributes): + """ + all Project attributes + """ + + member: List[User] = field(default_factory=list) + admin: List[User] = field(default_factory=list) + collection: List[Collection] = field(default_factory=list) + material: List[Material] = field(default_factory=list) + + _json_attrs: JsonAttributes = JsonAttributes() + + @beartype + def __init__(self, name: str, collection: Optional[List[Collection]] = None, material: Optional[List[Material]] = None, notes: str = "", **kwargs): + """ + Create a Project node with Project name and Group + + Parameters + ---------- + name: str + project name + collection: List[Collection] + list of Collections that belongs to this Project + material: List[Material] + list of materials that belongs to this project + notes: str + notes for this project + + Returns + ------- + None + instantiate a Project node + """ + super().__init__(name=name, notes=notes, **kwargs) + + if collection is None: + collection = [] + + if material is None: + material = [] + + self._json_attrs = replace(self._json_attrs, name=name, collection=collection, material=material) + self.validate() + + def validate(self, api=None, is_patch=False): + from cript.nodes.exceptions import ( + CRIPTOrphanedMaterialError, + get_orphaned_experiment_exception, + ) + + # First validate like other nodes + super().validate(api=api, is_patch=is_patch) + + # Check graph for orphaned nodes, that should be listed in project + # Project.materials should contain all material nodes + project_graph_materials = self.find_children({"node": ["Material"]}) + # Combine all materials listed in the project inventories + project_inventory_materials = [] + for inventory in self.find_children({"node": ["Inventory"]}): + for material in inventory.material: + project_inventory_materials.append(material) + for material in project_graph_materials: + if material not in self.material and material not in project_inventory_materials: + raise CRIPTOrphanedMaterialError(material) + + # Check graph for orphaned nodes, that should be listed in the experiments + project_experiments = self.find_children({"node": ["Experiment"]}) + # There are 4 different types of nodes Experiments are collecting. 
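+        # For example, a Process node that is reachable in this project's graph only as
+        # some Data node's `sample_preparation`, but is never listed in any
+        # experiment.process, is caught by the loop below and reported via
+        # get_orphaned_experiment_exception (illustrative scenario, not an exhaustive list).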
+ node_types = ("Process", "Computation", "ComputationProcess", "Data") + # We loop over them with the same logic + for node_type in node_types: + # All in the graph has to be in at least one experiment + project_graph_nodes = self.find_children({"node": [node_type]}) + node_type_attr = node_type.lower() + # Non-consistent naming makes this necessary for Computation Process + if node_type == "ComputationProcess": + node_type_attr = "computation_process" + + # Concatenation of all experiment attributes (process, computation, etc.) + # Every node of the graph must be present somewhere in this concatenated list. + experiment_nodes = [] + for experiment in project_experiments: + for ex_node in getattr(experiment, node_type_attr): + experiment_nodes.append(ex_node) + for node in project_graph_nodes: + if node not in experiment_nodes: + raise get_orphaned_experiment_exception(node) + + @property + @beartype + def member(self) -> List[User]: + return self._json_attrs.member.copy() + + @property + @beartype + def admin(self) -> List[User]: + return self._json_attrs.admin + + @property + @beartype + def collection(self) -> List[Collection]: + """ + Collection is a Project node's property that can be set during creation in the constructor + or later by setting the project's property + + Examples + -------- + ```python + my_new_collection = cript.Collection( + name="my collection name", experiments=[my_experiment_node] + ) + + my_project.collection = my_new_collection + ``` + + Returns + ------- + Collection: List[Collection] + the list of collections within this project + """ + return self._json_attrs.collection + + @collection.setter + @beartype + def collection(self, new_collection: List[Collection]) -> None: + """ + set list of collections for the project node + + Parameters + ---------- + new_collection: List[Collection] + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, collection=new_collection) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def material(self) -> List[Material]: + """ + List of Materials that belong to this Project. + + Examples + -------- + ```python + identifiers = [{"alternative_names": "my material alternative name"}] + my_material = cript.Material(name="my material", identifiers=identifiers) + + my_project.material = [my_material] + ``` + + Returns + ------- + Material: List[Material] + List of materials that belongs to this project + """ + return self._json_attrs.material + + @material.setter + @beartype + def material(self, new_materials: List[Material]) -> None: + """ + set the list of materials for this project + + Parameters + ---------- + new_materials: List[Material] + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, material=new_materials) + self._update_json_attrs_if_valid(new_attrs) diff --git a/src/cript/nodes/primary_nodes/reference.py b/src/cript/nodes/primary_nodes/reference.py new file mode 100644 index 000000000..e4ba0603f --- /dev/null +++ b/src/cript/nodes/primary_nodes/reference.py @@ -0,0 +1,689 @@ +from dataclasses import dataclass, field, replace +from typing import List, Optional, Union + +from beartype import beartype + +from cript.nodes.uuid_base import UUIDBaseNode + + +class Reference(UUIDBaseNode): + """ + ## Definition + + The + [Reference node](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=15) + + contains the metadata for a literature publication, book, or anything external to CRIPT. 
+ The reference node does NOT contain the base attributes. + + The reference node is always used inside the citation + sub-object to enable users to specify the context of the reference. + + ## Attributes + | attribute | type | example | description | required | vocab | + |-----------|-----------|--------------------------------------------|-----------------------------------------------|---------------|-------| + | type | str | journal_article | type of literature | True | True | + | title | str | 'Living' Polymers | title of publication | True | | + | author | list[str] | Michael Szwarc | list of authors | | | + | journal | str | Nature | journal of the publication | | | + | publisher | str | Springer | publisher of publication | | | + | year | int | 1956 | year of publication | | | + | volume | int | 178 | volume of publication | | | + | issue | int | 0 | issue of publication | | | + | pages | list[int] | [1168, 1169] | page range of publication | | | + | doi | str | 10.1038/1781168a0 | DOI: digital object identifier | Conditionally | | + | issn | str | 1476-4687 | ISSN: international standard serial number | Conditionally | | + | arxiv_id | str | 1501 | arXiv identifier | | | + | pmid | int | ######## | PMID: PubMed ID | | | + | website | str | https://www.nature.com/artic les/1781168a0 | website where the publication can be accessed | | | + + + ## Can be added to + * [Citation](../../subobjects/citation) + + ## Available Subobjects + * None + + !!! warning "Reference will always be public" + Reference node is meant to always be public and static to allow globally link data to the reference + + ## JSON Representation + ```json + { + "node":["Reference"], + "uid":"_:c681a947-0554-4acd-a01c-06ad76e34b87", + "uuid":"c681a947-0554-4acd-a01c-06ad76e34b87", + "author":["Ludwig Schneider","Marcus Müller"], + "doi":"10.1016/j.cpc.2018.08.011", + "issn":"0010-4655", + "journal":"Computer Physics Communications", + "pages":[463,476], + "publisher":"Elsevier", + "title":"Multi-architecture Monte-Carlo (MC) simulation of soft coarse-grained polymeric materials: SOft coarse grained Monte-Carlo Acceleration (SOMA)", + "type":"journal_article", + "website":"https://www.sciencedirect.com/science/article/pii/S0010465518303072", + "year":2019 + } + ``` + """ + + @dataclass(frozen=True) + class JsonAttributes(UUIDBaseNode.JsonAttributes): + """ + all reference nodes attributes + + all int types are also None type in case they are not present it should be properly shown as None + instead of a placeholder number such as 0 or -1 + """ + + type: str = "" + title: str = "" + author: List[str] = field(default_factory=list) + journal: str = "" + publisher: str = "" + year: Optional[int] = None + volume: Optional[int] = None + issue: Optional[int] = None + pages: List[int] = field(default_factory=list) + doi: str = "" + issn: str = "" + arxiv_id: str = "" + pmid: Optional[int] = None + website: str = "" + + _json_attrs: JsonAttributes = JsonAttributes() + + @beartype + def __init__( + self, + type: str, + title: str, + author: Optional[List[str]] = None, + journal: str = "", + publisher: str = "", + year: Optional[int] = None, + volume: Optional[int] = None, + issue: Optional[int] = None, + pages: Optional[List[int]] = None, + doi: str = "", + issn: str = "", + arxiv_id: str = "", + pmid: Optional[int] = None, + website: str = "", + **kwargs, + ): + """ + create a reference node + + reference type must come from CRIPT controlled vocabulary + + Parameters + ---------- + type: str + type of literature. 
+ The reference type must come from CRIPT controlled vocabulary + title: str + title of publication + author: List[str] default="" + list of authors + journal: str default="" + journal of publication + publisher: str default="" + publisher of publication + year: int default=None + year of publication + volume: int default=None + volume of publication + issue: int default=None + issue of publication + pages: List[int] default=None + page range of publication + doi: str default="" + DOI: digital object identifier + issn: str default="" + ISSN: international standard serial number + arxiv_id: str default="" + arXiv identifier + pmid: int default=None + PMID: PubMed ID + website: str default="" + website where the publication can be accessed + + + Examples + -------- + ```python + my_reference = cript.Reference(type="journal_article", title="'Living' Polymers") + ``` + + Returns + ------- + None + Instantiate a reference node + """ + if author is None: + author = [] + + if pages is None: + pages = [] + + super().__init__(**kwargs) + + new_attrs = replace(self._json_attrs, type=type, title=title, author=author, journal=journal, publisher=publisher, year=year, volume=volume, issue=issue, pages=pages, doi=doi, issn=issn, arxiv_id=arxiv_id, pmid=pmid, website=website) + + self._update_json_attrs_if_valid(new_attrs) + self.validate() + + @property + @beartype + def type(self) -> str: + """ + Type of reference. + + The [reference type](https://app.criptapp.org/vocab/reference_type) + must come from the CRIPT controlled vocabulary + + Examples + -------- + ```python + my_reference.type = "journal_article" + ``` + + Returns + ------- + str + reference type + """ + return self._json_attrs.type + + @type.setter + @beartype + def type(self, new_reference_type: str) -> None: + """ + set the reference type attribute + + reference type must come from the CRIPT controlled vocabulary + + Parameters + ---------- + new_reference_type: str + + Returns + ------- + None + """ + # TODO validate the reference type with CRIPT controlled vocabulary + new_attrs = replace(self._json_attrs, type=new_reference_type) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def title(self) -> str: + """ + title of publication + + Examples + -------- + ```python + my_reference.title = "my new title" + ``` + + Returns + ------- + str + title of publication + """ + return self._json_attrs.title + + @title.setter + @beartype + def title(self, new_title: str) -> None: + """ + set the title for the reference node + + Parameters + ---------- + new_title: str + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, title=new_title) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def author(self) -> List[str]: + """ + List of authors for this reference node + + Examples + -------- + ```python + my_reference.author = ["Bradley D. 
Olsen", "Dylan Walsh"] + ``` + + Returns + ------- + List[str] + list of authors + """ + return self._json_attrs.author.copy() + + @author.setter + @beartype + def author(self, new_author: List[str]) -> None: + """ + set the list of authors for the reference node + + Parameters + ---------- + new_author: List[str] + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, author=new_author) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def journal(self) -> str: + """ + journal of publication + + Examples + -------- + ```python + my_reference.journal = "my new journal" + ``` + + Returns + ------- + str + journal of publication + """ + return self._json_attrs.journal + + @journal.setter + @beartype + def journal(self, new_journal: str) -> None: + """ + set the journal attribute for this reference node + + Parameters + ---------- + new_journal: str + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, journal=new_journal) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def publisher(self) -> str: + """ + publisher for this reference node + + Examples + -------- + ```python + my_reference.publisher = "my new publisher" + ``` + + Returns + ------- + str + publisher of this publication + """ + return self._json_attrs.publisher + + @publisher.setter + @beartype + def publisher(self, new_publisher: str) -> None: + """ + set the publisher for this reference node + + Parameters + ---------- + new_publisher: str + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, publisher=new_publisher) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def year(self) -> Union[int, None]: + """ + year for the scholarly work + + Examples + -------- + ```python + my_reference.year = 2023 + ``` + + Returns + ------- + int + """ + return self._json_attrs.year + + @year.setter + @beartype + def year(self, new_year: Union[int, None]) -> None: + """ + set the year for the scholarly work within the reference node + + Parameters + ---------- + new_year: int + + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, year=new_year) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def volume(self) -> Union[int, None]: + """ + Volume of the scholarly work from the reference node + + Examples + -------- + ```python + my_reference.volume = 1 + ``` + + Returns + ------- + int + volume number of the publishing + """ + return self._json_attrs.volume + + @volume.setter + @beartype + def volume(self, new_volume: Union[int, None]) -> None: + """ + set the volume of the scholarly work for this reference node + + Parameters + ---------- + new_volume: int + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, volume=new_volume) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def issue(self) -> Union[int, None]: + """ + issue of the scholarly work for the reference node + + Examples + -------- + ```python + my_reference.issue = 2 + ``` + + Returns + ------- + None + """ + return self._json_attrs.issue + + @issue.setter + @beartype + def issue(self, new_issue: Union[int, None]) -> None: + """ + set the issue of the scholarly work + + Parameters + ---------- + new_issue: int + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, issue=new_issue) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def pages(self) -> List[int]: + """ + pages of the scholarly work used in the 
reference node + + Examples + -------- + ```python + my_reference.pages = [123, 456] + ``` + + Returns + ------- + int + """ + return self._json_attrs.pages.copy() + + @pages.setter + @beartype + def pages(self, new_pages_list: List[int]) -> None: + """ + set the list of pages of the scholarly work for this reference node + + Parameters + ---------- + new_pages_list: List[int] + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, pages=new_pages_list) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def doi(self) -> str: + """ + get the digital object identifier (DOI) for this reference node + + Examples + -------- + ```python + my_reference.doi = "100.1038/1781168a0" + ``` + + Returns + ------- + str + digital object identifier (DOI) for this reference node + """ + return self._json_attrs.doi + + @doi.setter + @beartype + def doi(self, new_doi: str) -> None: + """ + set the digital object identifier (DOI) for the scholarly work for this reference node + + Parameters + ---------- + new_doi: str + + Examples + -------- + ```python + my_reference.doi = "100.1038/1781168a0" + ``` + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, doi=new_doi) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def issn(self) -> str: + """ + The international standard serial number (ISSN) for this reference node + + Examples + ```python + my_reference.issn = "1456-4687" + ``` + + Returns + ------- + str + ISSN for this reference node + """ + return self._json_attrs.issn + + @issn.setter + @beartype + def issn(self, new_issn: str) -> None: + """ + set the international standard serial number (ISSN) for the scholarly work for this reference node + + Parameters + ---------- + new_issn: str + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, issn=new_issn) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def arxiv_id(self) -> str: + """ + The arXiv identifier for the scholarly work for this reference node + + Examples + -------- + ```python + my_reference.arxiv_id = "1501" + ``` + + Returns + ------- + str + arXiv identifier for the scholarly work for this publishing + """ + return self._json_attrs.arxiv_id + + @arxiv_id.setter + @beartype + def arxiv_id(self, new_arxiv_id: str) -> None: + """ + set the arXiv identifier for the scholarly work for this reference node + + Parameters + ---------- + new_arxiv_id: str + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, arxiv_id=new_arxiv_id) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def pmid(self) -> Union[int, None]: + """ + The PubMed ID (PMID) for this reference node + + Examples + -------- + ```python + my_reference.pmid = 12345678 + ``` + + Returns + ------- + int + the PubMedID of this publishing + """ + return self._json_attrs.pmid + + @pmid.setter + @beartype + def pmid(self, new_pmid: Union[int, None]) -> None: + """ + + Parameters + ---------- + new_pmid + + Returns + ------- + + """ + # TODO can possibly add validations, possibly in forms of length checking + # to be sure its the correct length + new_attrs = replace(self._json_attrs, pmid=new_pmid) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def website(self) -> str: + """ + The website URL for the scholarly work + + Examples + -------- + ```python + my_reference.website = "https://criptapp.org" + ``` + + Returns + ------- + str + the website URL of this publishing + """ + return 
self._json_attrs.website + + @website.setter + @beartype + def website(self, new_website: str) -> None: + """ + set the website URL for the scholarly work + + Parameters + ---------- + new_website: str + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, website=new_website) + self._update_json_attrs_if_valid(new_attrs) diff --git a/src/cript/nodes/subobjects/__init__.py b/src/cript/nodes/subobjects/__init__.py new file mode 100644 index 000000000..f14afc47a --- /dev/null +++ b/src/cript/nodes/subobjects/__init__.py @@ -0,0 +1,12 @@ +# trunk-ignore-all(ruff/F401) +from cript.nodes.subobjects.algorithm import Algorithm +from cript.nodes.subobjects.citation import Citation +from cript.nodes.subobjects.computational_forcefield import ComputationalForcefield +from cript.nodes.subobjects.condition import Condition +from cript.nodes.subobjects.equipment import Equipment +from cript.nodes.subobjects.ingredient import Ingredient +from cript.nodes.subobjects.parameter import Parameter +from cript.nodes.subobjects.property import Property +from cript.nodes.subobjects.quantity import Quantity +from cript.nodes.subobjects.software import Software +from cript.nodes.subobjects.software_configuration import SoftwareConfiguration diff --git a/src/cript/nodes/subobjects/algorithm.py b/src/cript/nodes/subobjects/algorithm.py new file mode 100644 index 000000000..9f05dcd17 --- /dev/null +++ b/src/cript/nodes/subobjects/algorithm.py @@ -0,0 +1,268 @@ +from dataclasses import dataclass, field, replace +from typing import List, Optional + +from cript.nodes.subobjects.citation import Citation +from cript.nodes.subobjects.parameter import Parameter +from cript.nodes.uuid_base import UUIDBaseNode + + +class Algorithm(UUIDBaseNode): + """ + ## Definition + + An [algorithm sub-object](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=25) + is a set of instructions that define a computational process. + An algorithm consists of parameters that are used in the computation and the computational process itself. 
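+
+    For example, a Monte-Carlo barostat algorithm with one parameter sub-object could be
+    assembled as shown below (values are illustrative; `key` and `type` must come from the
+    CRIPT controlled vocabulary):
+
+    ```python
+    import cript
+
+    # parameter sub-object carried by the algorithm
+    my_parameter = cript.Parameter("update_frequency", 1000.0, "1/second")
+
+    # algorithm sub-object with its parameter list
+    my_algorithm = cript.Algorithm(key="mc_barostat", type="barostat", parameter=[my_parameter])
+    ```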
+ + + ## Attributes + + | Keys | Type | Example | Description | Required | Vocab | + |-----------|-----------------|----------------------------------------------|--------------------------------------------------------|----------|-------| + | key | str | ensemble, thermo-barostat | system configuration, algorithms used in a computation | True | True | + | type | str | NPT for ensemble, Nose-Hoover for thermostat | specific type of configuration, algorithm | True | | + | parameter | list[Parameter] | | setup associated parameters | | | + | citation | Citation | | reference to a book, paper, or scholarly work | | | + + ## Can be Added To + * [SoftwareConfiguration](../software_configuration) + + ## Available sub-objects + * [Parameter](../parameter) + * [Citation](../citation) + + ## JSON Representation + ```json + { + "node": ["Algorithm"], + "key": "mc_barostat", + "type": "barostat", + "parameter": { + "node": ["Parameter"], + "key": "update_frequency", + "value": 1000.0, + "unit": "1/second" + }, + "citation": { + "node": ["Citation"], + "type": "reference" + "reference": { + "node": ["Reference"], + "type": "journal_article", + "title": "Multi-architecture Monte-Carlo (MC) simulation of soft coarse-grained polymeric materials: SOft coarse grained Monte-Carlo Acceleration (SOMA)", + "author": ["Ludwig Schneider", "Marcus Müller"], + "journal": "Computer Physics Communications", + "publisher": "Elsevier", + "year": 2019, + "pages": [463, 476], + "doi": "10.1016/j.cpc.2018.08.011", + "issn": "0010-4655", + "website": "https://www.sciencedirect.com/science/article/pii/S0010465518303072", + }, + }, + } + ``` + """ + + @dataclass(frozen=True) + class JsonAttributes(UUIDBaseNode.JsonAttributes): + key: str = "" + type: str = "" + + parameter: List[Parameter] = field(default_factory=list) + citation: List[Citation] = field(default_factory=list) + + _json_attrs: JsonAttributes = JsonAttributes() + + def __init__(self, key: str, type: str, parameter: Optional[List[Parameter]] = None, citation: Optional[List[Citation]] = None, **kwargs): # ignored + """ + create algorithm sub-object + + Parameters + ---------- + key : str + algorithm key must come from [CRIPT controlled vocabulary]() + type : str + algorithm type must come from [CRIPT controlled vocabulary]() + parameter : List[Parameter], optional + parameter sub-object, by default None + citation : List[Citation], optional + citation sub-object, by default None + + Examples + -------- + ```python + # create algorithm sub-object + algorithm = cript.Algorithm(key="mc_barostat", type="barostat") + ``` + + Returns + ------- + None + instantiate an algorithm node + """ + if parameter is None: + parameter = [] + if citation is None: + citation = [] + super().__init__(**kwargs) + self._json_attrs = replace(self._json_attrs, key=key, type=type, parameter=parameter) + self.validate() + + @property + def key(self) -> str: + """ + Algorithm key + + Algorithm key must come from [CRIPT controlled vocabulary](https://app.criptapp.org/vocab/algorithm_key) + + Examples + -------- + ```python + algorithm.key = "amorphous_cell_module" + ``` + + Returns + ------- + str + algorithm key + """ + return self._json_attrs.key + + @key.setter + def key(self, new_key: str) -> None: + """ + set the algorithm key + + > Algorithm key must come from [CRIPT Controlled Vocabulary]() + + Parameters + ---------- + new_key : str + algorithm key + """ + new_attrs = replace(self._json_attrs, key=new_key) + self._update_json_attrs_if_valid(new_attrs) + + @property + def type(self) 
-> str:
+        """
+        Algorithm type
+
+        > Algorithm type must come from [CRIPT controlled vocabulary]()
+
+        Examples
+        --------
+        ```python
+        my_algorithm.type = "integration"
+        ```
+
+        Returns
+        -------
+        str
+            algorithm type
+        """
+        return self._json_attrs.type
+
+    @type.setter
+    def type(self, new_type: str) -> None:
+        new_attrs = replace(self._json_attrs, type=new_type)
+        self._update_json_attrs_if_valid(new_attrs)
+
+    @property
+    def parameter(self) -> List[Parameter]:
+        """
+        list of [Parameter](../parameter) sub-objects for the algorithm sub-object
+
+        Examples
+        --------
+        ```python
+        # create parameter sub-objects
+        my_parameter = [
+            cript.Parameter("update_frequency", 1000.0, "1/second"),
+            cript.Parameter("damping_time", 1.0, "second"),
+        ]
+
+        # add parameter sub-objects to the algorithm sub-object
+        algorithm.parameter = my_parameter
+        ```
+
+        Returns
+        -------
+        List[Parameter]
+            list of parameters for the algorithm sub-object
+        """
+        return self._json_attrs.parameter.copy()
+
+    @parameter.setter
+    def parameter(self, new_parameter: List[Parameter]) -> None:
+        """
+        set a list of cript.Parameter sub-objects
+
+        Parameters
+        ----------
+        new_parameter : List[Parameter]
+            list of Parameter sub-objects for the algorithm sub-object
+
+        Returns
+        -------
+        None
+        """
+        new_attrs = replace(self._json_attrs, parameter=new_parameter)
+        self._update_json_attrs_if_valid(new_attrs)
+
+    @property
+    def citation(self) -> Citation:
+        """
+        [citation](../citation) subobject for algorithm subobject
+
+        Examples
+        --------
+        ```python
+        title = "Multi-architecture Monte-Carlo (MC) simulation of soft coarse-grained polymeric materials: "
+        title += "SOft coarse grained Monte-Carlo Acceleration (SOMA)"
+
+        # create reference node
+        my_reference = cript.Reference(
+            type="journal_article",
+            title=title,
+            author=["Ludwig Schneider", "Marcus Müller"],
+            journal="Computer Physics Communications",
+            publisher="Elsevier",
+            year=2019,
+            pages=[463, 476],
+            doi="10.1016/j.cpc.2018.08.011",
+            issn="0010-4655",
+            website="https://www.sciencedirect.com/science/article/pii/S0010465518303072",
+        )
+
+        # create citation sub-object and add reference to it
+        my_citation = cript.Citation(type="reference", reference=my_reference)
+
+        # add citation to algorithm node
+        algorithm.citation = my_citation
+        ```
+
+        Returns
+        -------
+        citation node: Citation
+            get the algorithm citation node
+        """
+        return self._json_attrs.citation.copy()  # type: ignore
+
+    @citation.setter
+    def citation(self, new_citation: Citation) -> None:
+        """
+        set the algorithm citation subobject
+
+        Parameters
+        ----------
+        new_citation : Citation
+            new citation subobject to replace the current
+
+        Returns
+        -------
+        None
+        """
+        new_attrs = replace(self._json_attrs, citation=new_citation)
+        self._update_json_attrs_if_valid(new_attrs)
diff --git a/src/cript/nodes/subobjects/citation.py b/src/cript/nodes/subobjects/citation.py
new file mode 100644
index 000000000..e3aae8bac
--- /dev/null
+++ b/src/cript/nodes/subobjects/citation.py
@@ -0,0 +1,201 @@
+from dataclasses import dataclass, replace
+from typing import Optional, Union
+
+from beartype import beartype
+
+from cript.nodes.primary_nodes.reference import Reference
+from cript.nodes.uuid_base import UUIDBaseNode
+
+
+class Citation(UUIDBaseNode):
+    """
+    ## Definition
+    The [Citation sub-object](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=26)
+    essentially houses [Reference nodes](../../primary_nodes/reference). 
The citation subobject can then be added to CRIPT Primary nodes. + + ## Attributes + | attribute | type | example | description | required | vocab | + |-----------|-----------|--------------|-----------------------------------------------|----------|-------| + | type | str | derived_from | key for identifier | True | True | + | reference | Reference | | reference to a book, paper, or scholarly work | True | | + + ## Can Be Added To + * [Collection node](../../primary_nodes/collection) + * [Computation node](../../primary_nodes/computation) + * [Computation Process Node](../../primary_nodes/computation_process) + * [Data node](../../primary_nodes/data) + + * [Computational Forcefield subobjects](../computational_forcefield) + * [Property subobject](../property) + * [Algorithm subobject](../algorithm) + * [Equipment subobject](../equipment) + + --- + + ## Available Subobjects + * `None` + + ## JSON Representation + ```json + "citation": { + "node": ["Citation"], + "type": "reference", + "reference": { + "node": ["Reference"], + "type": "journal_article", + "title": "Multi-architecture Monte-Carlo (MC) simulation of soft coarse-grained polymeric materials: SOft coarse grained Monte-Carlo Acceleration (SOMA)", + "author": ["Ludwig Schneider", "Marcus Müller"], + "journal": "Computer Physics Communications", + "publisher": "Elsevier", + "year": 2019, + "pages": [463, 476], + "doi": "10.1016/j.cpc.2018.08.011", + "issn": "0010-4655", + "website": "https://www.sciencedirect.com/science/article/pii/S0010465518303072", + }, + } + ``` + """ + + @dataclass(frozen=True) + class JsonAttributes(UUIDBaseNode.JsonAttributes): + type: str = "" + reference: Optional[Reference] = None + + _json_attrs: JsonAttributes = JsonAttributes() + + @beartype + def __init__(self, type: str, reference: Reference, **kwargs): + """ + create a Citation subobject + + Parameters + ---------- + type : citation type + citation type must come from [CRIPT Controlled Vocabulary]() + reference : Reference + Reference node + + Examples + ------- + ```python + title = "Multi-architecture Monte-Carlo (MC) simulation of soft coarse-grained polymeric materials: " + title += "SOft coarse grained Monte-Carlo Acceleration (SOMA)" + + # create a Reference node for the Citation subobject + my_reference = Reference( + "journal_article", + title=title, + author=["Ludwig Schneider", "Marcus Müller"], + journal="Computer Physics Communications", + publisher="Elsevier", + year=2019, + pages=[463, 476], + doi="10.1016/j.cpc.2018.08.011", + issn="0010-4655", + website="https://www.sciencedirect.com/science/article/pii/S0010465518303072", + ) + + # create Citation subobject + my_citation = cript.Citation("reference", my_reference) + ``` + + Returns + ------- + None + Instantiate citation subobject + """ + super().__init__(**kwargs) + self._json_attrs = replace(self._json_attrs, type=type, reference=reference) + self.validate() + + @property + @beartype + def type(self) -> str: + """ + Citation type subobject + + Citation type must come from [CRIPT Controlled Vocabulary](https://app.criptapp.org/vocab/citation_type) + + Examples + -------- + ```python + my_citation.type = "extracted_by_algorithm" + ``` + + Returns + ------- + str + Citation type + """ + return self._json_attrs.type + + @type.setter + @beartype + def type(self, new_type: str) -> None: + """ + set the citation subobject type + + > Note: citation subobject must come from [CRIPT Controlled Vocabulary]() + + Parameters + ---------- + new_type : str + citation type + + Returns + ------- + 
None + """ + new_attrs = replace(self._json_attrs, type=new_type) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def reference(self) -> Union[Reference, None]: + """ + citation reference node + + Examples + -------- + ```python + # create a Reference node for the Citation subobject + my_reference = Reference( + "journal_article", + title="my title", + author=["Ludwig Schneider", "Marcus Müller"], + journal="Computer Physics Communications", + publisher="Elsevier", + year=2019, + pages=[463, 476], + doi="10.1016/j.cpc.2018.08.011", + issn="0010-4655", + website="https://www.sciencedirect.com/science/article/pii/S0010465518303072", + ) + + my_citation.reference = my_reference + ``` + + Returns + ------- + Reference + Reference node + """ + return self._json_attrs.reference + + @reference.setter + @beartype + def reference(self, new_reference: Reference) -> None: + """ + replace the current Reference node for the citation subobject + + Parameters + ---------- + new_reference : Reference + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, reference=new_reference) + self._update_json_attrs_if_valid(new_attrs) diff --git a/src/cript/nodes/subobjects/computational_forcefield.py b/src/cript/nodes/subobjects/computational_forcefield.py new file mode 100644 index 000000000..45416f0da --- /dev/null +++ b/src/cript/nodes/subobjects/computational_forcefield.py @@ -0,0 +1,478 @@ +from dataclasses import dataclass, field, replace +from typing import List, Optional + +from beartype import beartype + +from cript.nodes.primary_nodes.data import Data +from cript.nodes.subobjects.citation import Citation +from cript.nodes.uuid_base import UUIDBaseNode + + +class ComputationalForcefield(UUIDBaseNode): + """ + ## Definition + A [Computational Forcefield Subobject](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=23) + is a mathematical model that describes the forces between atoms and molecules. + It is used in computational chemistry and molecular dynamics simulations to predict the behavior of materials. + Forcefields are typically based on experimental data or quantum mechanical calculations, + and they are often used to study the properties of materials such as their structure, dynamics, and reactivity. 
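+
+    For example, an all-atom OPLS forcefield taken from a local LigParGen installation could be
+    recorded as shown below (values are illustrative and mirror the table below; `key` and
+    `building_block` must come from the CRIPT controlled vocabulary):
+
+    ```python
+    import cript
+
+    my_forcefield = cript.ComputationalForcefield(
+        key="opls_aa",
+        building_block="atom",
+        source="local LigParGen installation",
+        description="OPLS forcefield with partial charges calculated via the LBCC algorithm",
+    )
+    ```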
+ + ## Attributes + | attribute | type | example | description | required | vocab | + |------------------------|----------------|------------------------------------------------------------------------|--------------------------------------------------------------------------|----------|-------| + | key | str | CHARMM27 | type of forcefield | True | True | + | building_block | str | atom | type of building block | True | True | + | coarse_grained_mapping | str | SC3 beads in MARTINI forcefield | atom to beads mapping | | | + | implicit_solvent | str | water | Name of implicit solvent | | | + | source | str | package in GROMACS | source of forcefield | | | + | description | str | OPLS forcefield with partial charges calculated via the LBCC algorithm | description of the forcefield and any modifications that have been added | | | + | data | Data | | details of mapping schema and forcefield parameters | | | + | citation | list[Citation] | | reference to a book, paper, or scholarly work | | | + + + ## Can be Added To Primary Node: + * Material node + + ## JSON Representation + ```json + { + "node": ["ComputationalForcefield"], + "key": "opls_aa", + "building_block": "atom", + "coarse_grained_mapping": "atom -> atom", + "implicit_solvent": "no implicit solvent", + "source": "local LigParGen installation", + "description": "this is a test forcefield", + "data": { + "node":["Data"], + "name":"my data name", + "type":"afm_amp", + "file":[ + { + "node":["File"], + "type":"calibration", + "source":"https://criptapp.org", + "extension":".csv", + "data_dictionary":"my file's data dictionary" + } + ] + }, + "citation": { + "node": ["Citation"], + "type": "reference" + "reference": { + "node": ["Reference"], + "type": "journal_article", + "title": "Multi-architecture Monte-Carlo (MC) simulation of soft coarse-grained polymeric materials: SOft coarse grained Monte-Carlo Acceleration (SOMA)", + "author": ["Ludwig Schneider", "Marcus Müller"], + "journal": "Computer Physics Communications", + "publisher": "Elsevier", + "year": 2019, + "pages": [463, 476], + "doi": "10.1016/j.cpc.2018.08.011", + "issn": "0010-4655", + "website": "https://www.sciencedirect.com/science/article/pii/S0010465518303072", + } + } + + + ``` + + """ + + @dataclass(frozen=True) + class JsonAttributes(UUIDBaseNode.JsonAttributes): + key: str = "" + building_block: str = "" + coarse_grained_mapping: str = "" + implicit_solvent: str = "" + source: str = "" + description: str = "" + data: List[Data] = field(default_factory=list) + citation: List[Citation] = field(default_factory=list) + + _json_attrs: JsonAttributes = JsonAttributes() + + @beartype + def __init__(self, key: str, building_block: str, coarse_grained_mapping: str = "", implicit_solvent: str = "", source: str = "", description: str = "", data: Optional[List[Data]] = None, citation: Optional[List[Citation]] = None, **kwargs): + """ + instantiate a computational_forcefield subobject + + Parameters + ---------- + key : str + type of forcefield key must come from [CRIPT Controlled Vocabulary]() + building_block : str + type of computational_forcefield building_block must come from [CRIPT Controlled Vocabulary]() + coarse_grained_mapping : str, optional + atom to beads mapping, by default "" + implicit_solvent : str, optional + Name of implicit solvent, by default "" + source : str, optional + source of forcefield, by default "" + description : str, optional + description of the forcefield and any modifications that have been added, by default "" + data : List[Data], optional + 
details of mapping schema and forcefield parameters, by default None + citation : Union[List[Citation], None], optional + reference to a book, paper, or scholarly work, by default None + + + Examples + -------- + ```python + my_computational_forcefield = cript.ComputationalForcefield( + key="opls_aa", + building_block="atom", + ) + ``` + + Returns + ------- + None + Instantiate a computational_forcefield subobject + """ + if citation is None: + citation = [] + super().__init__(**kwargs) + + if data is None: + data = [] + + self._json_attrs = replace( + self._json_attrs, + key=key, + building_block=building_block, + coarse_grained_mapping=coarse_grained_mapping, + implicit_solvent=implicit_solvent, + source=source, + description=description, + data=data, + citation=citation, + ) + self.validate() + + @property + @beartype + def key(self) -> str: + """ + type of forcefield + + Computational_Forcefield key must come from + [CRIPT Controlled Vocabulary](https://app.criptapp.org/vocab/computational_forcefield_key) + + Examples + -------- + ```python + my_computational_forcefield.key = "amber" + ``` + + Returns + ------- + str + type of forcefield + """ + return self._json_attrs.key + + @key.setter + @beartype + def key(self, new_key: str) -> None: + """ + set key for this computational_forcefield + + Parameters + ---------- + new_key : str + computational_forcefield key + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, key=new_key) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def building_block(self) -> str: + """ + type of building block + + Computational_Forcefield building_block must come from + [CRIPT Controlled Vocabulary](https://app.criptapp.org/vocab/building_block) + + Examples + -------- + ```python + my_computational_forcefield.building_block = "atom" + ``` + + Returns + ------- + str + type of building block + """ + return self._json_attrs.building_block + + @building_block.setter + @beartype + def building_block(self, new_building_block: str) -> None: + """ + type of building block + + Parameters + ---------- + new_building_block : str + new type of building block + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, building_block=new_building_block) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def coarse_grained_mapping(self) -> str: + """ + atom to beads mapping + + Examples + -------- + ```python + my_computational_forcefield.coarse_grained_mapping = "SC3 beads in MARTINI forcefield" + ``` + + Returns + ------- + str + coarse_grained_mapping + """ + return self._json_attrs.coarse_grained_mapping + + @coarse_grained_mapping.setter + @beartype + def coarse_grained_mapping(self, new_coarse_grained_mapping: str) -> None: + """ + atom to beads mapping + + Parameters + ---------- + new_coarse_grained_mapping : str + new coarse_grained_mapping + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, coarse_grained_mapping=new_coarse_grained_mapping) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def implicit_solvent(self) -> str: + """ + Name of implicit solvent + + Examples + -------- + ```python + my_computational_forcefield.implicit_solvent = "water" + ``` + + Returns + ------- + str + _description_ + """ + return self._json_attrs.implicit_solvent + + @implicit_solvent.setter + @beartype + def implicit_solvent(self, new_implicit_solvent: str) -> None: + """ + set the implicit_solvent + + Parameters + ---------- + new_implicit_solvent : 
str + new implicit_solvent + """ + new_attrs = replace(self._json_attrs, implicit_solvent=new_implicit_solvent) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def source(self) -> str: + """ + source of forcefield + + Examples + -------- + ```python + my_computational_forcefield.source = "package in GROMACS" + ``` + + Returns + ------- + str + source of forcefield + """ + return self._json_attrs.source + + @source.setter + @beartype + def source(self, new_source: str) -> None: + """ + set the computational_forcefield + + Parameters + ---------- + new_source : str + new source of forcefield + """ + new_attrs = replace(self._json_attrs, source=new_source) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def description(self) -> str: + """ + description of the forcefield and any modifications that have been added + + Examples + -------- + ```python + my_computational_forcefield.description = "OPLS forcefield with partial charges calculated via the LBCC algorithm" + ``` + + Returns + ------- + str + description of the forcefield and any modifications that have been added + """ + return self._json_attrs.description + + @description.setter + @beartype + def description(self, new_description: str) -> None: + """ + set this computational_forcefields description + + Parameters + ---------- + new_description : str + new computational_forcefields description + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, description=new_description) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def data(self) -> List[Data]: + """ + details of mapping schema and forcefield parameters + + Examples + -------- + ```python + # create file nodes for the data node + my_file = cript.File( + source="https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf", + type="calibration", + extension=".pdf", + ) + + # create data node and add the file node to it + my_data = cript.Data( + name="my data node name", + type="afm_amp", + file=my_file, + ) + + # add data node to computational_forcefield subobject + my_computational_forcefield.data = [my_data] + ``` + + Returns + ------- + List[Data] + list of data nodes for this computational_forcefield subobject + """ + return self._json_attrs.data.copy() + + @data.setter + @beartype + def data(self, new_data: List[Data]) -> None: + """ + set the data attribute of this computational_forcefield node + + Parameters + ---------- + new_data : List[Data] + new list of data nodes + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, data=new_data) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def citation(self) -> List[Citation]: + """ + reference to a book, paper, or scholarly work + + Examples + -------- + ```python + # create reference node for the citation node + title = "Multi-architecture Monte-Carlo (MC) simulation of soft coarse-grained polymeric materials: " + title += "SOft coarse grained Monte-Carlo Acceleration (SOMA)" + + my_reference = cript.Reference( + "journal_article", + title=title, + author=["Ludwig Schneider", "Marcus Müller"], + journal="Computer Physics Communications", + publisher="Elsevier", + year=2019, + pages=[463, 476], + doi="10.1016/j.cpc.2018.08.011", + issn="0010-4655", + website="https://www.sciencedirect.com/science/article/pii/S0010465518303072", + ) + + # create citation node and add reference node to it + my_citation = cript.Citation(type="reference", 
reference=my_reference) + + my_computational_forcefield.citation = [my_citation] + ``` + + Returns + ------- + List[Citation] + computational_forcefield list of citations + """ + return self._json_attrs.citation.copy() + + @citation.setter + @beartype + def citation(self, new_citation: List[Citation]) -> None: + """ + set the citation subobject of the computational_forcefield subobject + + Parameters + ---------- + new_citation : List[Citation] + new citation subobject + """ + new_attrs = replace(self._json_attrs, citation=new_citation) + self._update_json_attrs_if_valid(new_attrs) diff --git a/src/cript/nodes/subobjects/condition.py b/src/cript/nodes/subobjects/condition.py new file mode 100644 index 000000000..1e06dc8e6 --- /dev/null +++ b/src/cript/nodes/subobjects/condition.py @@ -0,0 +1,537 @@ +from dataclasses import dataclass, field, replace +from numbers import Number +from typing import List, Optional, Union + +from beartype import beartype + +from cript.nodes.primary_nodes.data import Data +from cript.nodes.uuid_base import UUIDBaseNode + + +class Condition(UUIDBaseNode): + """ + ## Definition + + A [Condition](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=21) sub-object + is the conditions under which the experiment was conducted. + Some examples include temperature, mixing_rate, stirring, time_duration. + + ---- + + ## Can Be Added To: + ### Primary Nodes + * [Process](../../primary_nodes/process) + * [Computation_Process](../../primary_nodes/computation_process) + + ### Subobjects + * [Property](../property) + * [Equipment](../equipment) + + --- + + ## Attributes + + | attribute | type | example | description | required | vocab | + |------------------|--------|-------------------------|----------------------------------------------------------------------------------------|----------|-------| + | key | str | temp | type of condition | True | True | + | type | str | min | type of value stored, 'value' is just the number, 'min', 'max', 'avg', etc. 
for series | True | True | + | descriptor | str | upper temperature probe | freeform description for condition | | | + | value | Number | 1.23 | value or quantity | True | | + | unit | str | gram | unit for value | | | + | uncertainty | Number | 0.1 | uncertainty of value | | | + | uncertainty_type | str | std | type of uncertainty | | True | + | set_id | int | 0 | ID of set (used to link measurements in as series) | | | + | measurement _id | int | 0 | ID for a single measurement (used to link multiple condition at a single instance) | | | + | data | List[Data] | | detailed data associated with the condition | | | + + ## JSON Representation + ```json + { + "node": ["Condition"], + "key": "temperature", + "type": "value", + "descriptor": "room temperature of lab", + "value": 22, + "unit": "C", + "uncertainty": 5, + "uncertainty_type": "stdev", + "set_id": 0, + "measurement_id": 2, + "data": [{ + "node":["Data"], + "name":"my data name", + "type":"afm_amp", + "file":[ + { + "node":["File"], + "type":"calibration", + "source":"https://criptapp.org", + "extension":".csv", + "data_dictionary":"my file's data dictionary" + } + ] + }], + } + ``` + """ + + @dataclass(frozen=True) + class JsonAttributes(UUIDBaseNode.JsonAttributes): + key: str = "" + type: str = "" + descriptor: str = "" + value: Optional[Union[Number, str]] = None + unit: str = "" + uncertainty: Optional[Union[Number, str]] = None + uncertainty_type: str = "" + set_id: Optional[int] = None + measurement_id: Optional[int] = None + data: List[Data] = field(default_factory=list) + + _json_attrs: JsonAttributes = JsonAttributes() + + @beartype + def __init__( + self, + key: str, + type: str, + value: Union[Number, str], + unit: str = "", + descriptor: str = "", + uncertainty: Optional[Union[Number, str]] = None, + uncertainty_type: str = "", + set_id: Optional[int] = None, + measurement_id: Optional[int] = None, + data: Optional[List[Data]] = None, + **kwargs + ): + """ + create Condition sub-object + + Parameters + ---------- + key : str + type of condition + type : str + type of value stored + value : Number + value or quantity + unit : str, optional + unit for value, by default "" + descriptor : str, optional + freeform description for condition, by default "" + uncertainty : Union[Number, None], optional + uncertainty of value, by default None + uncertainty_type : str, optional + type of uncertainty, by default "" + set_id : Union[int, None], optional + ID of set (used to link measurements in as series), by default None + measurement_id : Union[int, None], optional + ID for a single measurement (used to link multiple condition at a single instance), by default None + data : List[Data], optional + detailed data associated with the condition, by default None + + + Examples + -------- + ```python + # instantiate a Condition sub-object + my_condition = cript.Condition( + key="temperature", + type="value", + value=22, + unit="C", + ) + ``` + + Returns + ------- + None + """ + super().__init__(**kwargs) + + if data is None: + data = [] + + self._json_attrs = replace( + self._json_attrs, + key=key, + type=type, + value=value, + descriptor=descriptor, + unit=unit, + uncertainty=uncertainty, + uncertainty_type=uncertainty_type, + set_id=set_id, + measurement_id=measurement_id, + data=data, + ) + self.validate() + + @property + @beartype + def key(self) -> str: + """ + type of condition + + > Condition key must come from [CRIPT Controlled Vocabulary](https://app.criptapp.org/vocab/condition_key) + + Examples + -------- + ```python + 
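        # the new key is applied only if the updated node is still valid +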
my_condition.key = "energy_threshold" + ``` + + Returns + ------- + condition key: str + type of condition + """ + return self._json_attrs.key + + @key.setter + @beartype + def key(self, new_key: str) -> None: + """ + set this Condition sub-object key + + > Condition key must come from [CRIPT Controlled Vocabulary]() + + Parameters + ---------- + new_key : str + type of condition + + Returns + -------- + None + """ + new_attrs = replace(self._json_attrs, key=new_key) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def type(self) -> str: + """ + description for the value stored for this Condition node + + Examples + -------- + ```python + my_condition.type = "min" + ``` + + Returns + ------- + condition type: str + description for the value + """ + return self._json_attrs.type + + @type.setter + @beartype + def type(self, new_type: str) -> None: + """ + set the type attribute for this Condition node + + Parameters + ---------- + new_type : str + new description of the Condition value + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, type=new_type) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def descriptor(self) -> str: + """ + freeform description for Condition + + Examples + -------- + ```python + my_condition.description = "my condition description" + ``` + + Returns + ------- + description: str + description of this Condition sub-object + """ + return self._json_attrs.descriptor + + @descriptor.setter + @beartype + def descriptor(self, new_descriptor: str) -> None: + """ + set the description of this Condition sub-object + + Parameters + ---------- + new_descriptor : str + new description describing the Condition subobject + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, descriptor=new_descriptor) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def value(self) -> Optional[Union[Number, str]]: + """ + value or quantity + + Examples + ------- + ```python + my_condition.value = 10 + ``` + + Returns + ------- + Union[Number, None] + new value or quantity + """ + return self._json_attrs.value + + def set_value(self, new_value: Union[Number, str], new_unit: str) -> None: + """ + set the value for this Condition subobject + + Parameters + ---------- + new_value : Number + new value + new_unit : str + units for the new value + + Examples + -------- + ```python + my_condition.set_value(new_value=1, new_unit="gram") + ``` + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, value=new_value, unit=new_unit) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def unit(self) -> str: + """ + set units for this Condition subobject + + Examples + -------- + ```python + my_condition.unit = "gram" + ``` + + Returns + ------- + unit: str + units + """ + return self._json_attrs.unit + + @property + @beartype + def uncertainty(self) -> Optional[Union[Number, str]]: + """ + set uncertainty value for this Condition subobject + + Examples + -------- + ```python + my_condition.uncertainty = "0.1" + ``` + + Returns + ------- + uncertainty: Union[Number, None] + uncertainty + """ + return self._json_attrs.uncertainty + + @beartype + def set_uncertainty(self, new_uncertainty: Union[Number, str], new_uncertainty_type: str) -> None: + """ + set uncertainty and uncertainty type + + Parameters + ---------- + new_uncertainty : Number + new uncertainty value + new_uncertainty_type : str + new uncertainty type + + Examples + -------- + ```python + 
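        # uncertainty and uncertainty_type are always updated together in a single call +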
my_condition.set_uncertainty(new_uncertainty="0.2", new_uncertainty_type="std") + ``` + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, uncertainty=new_uncertainty, uncertainty_type=new_uncertainty_type) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def uncertainty_type(self) -> str: + """ + Uncertainty type for the uncertainty value + + [Uncertainty type](https://app.criptapp.org/vocab/uncertainty_type) must come from CRIPT controlled vocabulary + + Examples + -------- + ```python + my_condition.uncertainty_type = "std" + ``` + + Returns + ------- + uncertainty_type: str + uncertainty type + """ + return self._json_attrs.uncertainty_type + + @property + @beartype + def set_id(self) -> Union[int, None]: + """ + ID of set (used to link measurements in as series) + + Examples + -------- + ```python + my_condition.set_id = 0 + ``` + + Returns + ------- + set_id: Union[int, None] + ID of set + """ + return self._json_attrs.set_id + + @set_id.setter + @beartype + def set_id(self, new_set_id: Union[int, None]) -> None: + """ + set this Condition subobjects set_id + + Parameters + ---------- + new_set_id : Union[int, None] + ID of set + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, set_id=new_set_id) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def measurement_id(self) -> Union[int, None]: + """ + ID for a single measurement (used to link multiple condition at a single instance) + + Examples + -------- + ```python + my_condition.measurement_id = 0 + ``` + + Returns + ------- + measurement_id: Union[int, None] + ID for a single measurement + """ + return self._json_attrs.measurement_id + + @measurement_id.setter + @beartype + def measurement_id(self, new_measurement_id: Union[int, None]) -> None: + """ + set the set_id for this Condition subobject + + Parameters + ---------- + new_measurement_id : Union[int, None] + ID for a single measurement + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, measurement_id=new_measurement_id) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def data(self) -> List[Data]: + """ + detailed data associated with the condition + + Examples + -------- + ```python + # create file nodes for the data node + my_file = cript.File( + source="https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf", + type="calibration", + extension=".pdf", + ) + + # create data node and add the file node to it + my_data = cript.Data( + name="my data node name", + type="afm_amp", + file=my_file, + ) + + # add data node to Condition subobject + my_condition.data = [my_data] + ``` + + Returns + ------- + Condition: Union[Data, None] + detailed data associated with the condition + """ + return self._json_attrs.data.copy() + + @data.setter + @beartype + def data(self, new_data: List[Data]) -> None: + """ + set the data node for this Condition Subobject + + Parameters + ---------- + new_data : List[Data] + new Data node + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, data=new_data) + self._update_json_attrs_if_valid(new_attrs) diff --git a/src/cript/nodes/subobjects/equipment.py b/src/cript/nodes/subobjects/equipment.py new file mode 100644 index 000000000..625ae2648 --- /dev/null +++ b/src/cript/nodes/subobjects/equipment.py @@ -0,0 +1,327 @@ +from dataclasses import dataclass, field, replace +from typing import List, Union + +from beartype import beartype + +from 
cript.nodes.subobjects.citation import Citation +from cript.nodes.subobjects.condition import Condition +from cript.nodes.supporting_nodes.file import File +from cript.nodes.uuid_base import UUIDBaseNode + + +class Equipment(UUIDBaseNode): + """ + ## Definition + An [Equipment](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=23) + sub-object specifies the physical instruments, tools, glassware, etc. used in a process. + + --- + + ## Can Be Added To: + * [Process node](../../primary_nodes/process) + + ## Available sub-objects: + * [Condition](../condition) + * [Citation](../citation) + + --- + + ## Attributes + + | attribute | type | example | description | required | vocab | + |-------------|-----------------|-----------------------------------------------|--------------------------------------------------------------------------------|----------|-------| + | key | str | hot plate | material | True | True | + | description | str | Hot plate with silicon oil bath with stir bar | additional details about the equipment | | | + | condition | list[Condition] | | conditions under which the property was measured | | | + | files | list[File] | | list of file nodes to link to calibration or equipment specification documents | | | + | citation | list[Citation] | | reference to a book, paper, or scholarly work | | | + + ## JSON Representation + ```json + { + "node":["Equipment"], + "description": "my equipment description", + "key":"burner", + "uid":"_:19708284-1bd7-42e4-b8b2-da7ea0bc2ac9", + "uuid":"19708284-1bd7-42e4-b8b2-da7ea0bc2ac9" + } + ``` + + """ + + @dataclass(frozen=True) + class JsonAttributes(UUIDBaseNode.JsonAttributes): + key: str = "" + description: str = "" + condition: List[Condition] = field(default_factory=list) + file: List[File] = field(default_factory=list) + citation: List[Citation] = field(default_factory=list) + + _json_attrs: JsonAttributes = JsonAttributes() + + @beartype + def __init__(self, key: str, description: str = "", condition: Union[List[Condition], None] = None, file: Union[List[File], None] = None, citation: Union[List[Citation], None] = None, **kwargs) -> None: + """ + create equipment sub-object + + Parameters + ---------- + key : str + Equipment key must come from [CRIPT Controlled Vocabulary]() + description : str, optional + additional details about the equipment, by default "" + condition : Union[List[Condition], None], optional + Conditions under which the property was measured, by default None + file : Union[List[File], None], optional + list of file nodes to link to calibration or equipment specification documents, by default None + citation : Union[List[Citation], None], optional + reference to a scholarly work, by default None + + Example + ------- + ```python + my_equipment = cript.Equipment(key="burner") + ``` + + Returns + ------- + None + instantiate equipment sub-object + """ + if condition is None: + condition = [] + if file is None: + file = [] + if citation is None: + citation = [] + super().__init__(**kwargs) + self._json_attrs = replace(self._json_attrs, key=key, description=description, condition=condition, file=file, citation=citation) + self.validate() + + @property + @beartype + def key(self) -> str: + """ + scientific instrument + + Equipment key must come from [CRIPT Controlled Vocabulary](https://app.criptapp.org/vocab/equipment_key) + + Examples + -------- + ```python + my_equipment = cript.Equipment(key="burner") + ``` + + Returns + ------- + Equipment: str + + """ + return 
self._json_attrs.key + + @key.setter + @beartype + def key(self, new_key: str) -> None: + """ + set the equipment key + + > Equipment key must come from [CRIPT Controlled Vocabulary]() + + Parameters + ---------- + new_key : str + equipment sub-object key + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, key=new_key) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def description(self) -> str: + """ + description of the equipment + + Examples + -------- + ```python + my_equipment.description = "additional details about the equipment" + ``` + + Returns + ------- + str + additional description of the equipment + """ + return self._json_attrs.description + + @description.setter + @beartype + def description(self, new_description: str) -> None: + """ + set this equipments description + + Parameters + ---------- + new_description : str + equipment description + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, description=new_description) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def condition(self) -> List[Condition]: + """ + conditions under which the property was measured + + Examples + -------- + ```python + # create a Condition sub-object + my_condition = cript.Condition( + key="temperature", + type="value", + value=22, + unit="C", + ) + + # add Condition sub-object to Equipment sub-object + my_equipment.condition = [my_condition] + ``` + + Returns + ------- + List[Condition] + list of Condition sub-objects + """ + return self._json_attrs.condition.copy() + + @condition.setter + @beartype + def condition(self, new_condition: List[Condition]) -> None: + """ + set a list of Conditions for the equipment sub-object + + Parameters + ---------- + new_condition : List[Condition] + list of Condition sub-objects + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, condition=new_condition) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def file(self) -> List[File]: + """ + list of file nodes to link to calibration or equipment specification documents + + Examples + -------- + ```python + # create a file node to be added to the equipment sub-object + my_file = cript.File( + source="https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf", + type="calibration", + extension=".pdf", + ) + + # add file node to equipment sub-object + my_equipment.file = [my_file] + + ``` + + Returns + ------- + List[File] + list of file nodes + """ + return self._json_attrs.file.copy() + + @file.setter + @beartype + def file(self, new_file: List[File]) -> None: + """ + set the file node for the equipment subobject + + Parameters + ---------- + new_file : List[File] + list of File nodes + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, file=new_file) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def citation(self) -> List[Citation]: + """ + reference to a book, paper, or scholarly work + + Examples + -------- + ```python + # create reference node for the citation node + title = "Multi-architecture Monte-Carlo (MC) simulation of soft coarse-grained polymeric materials: " + title += "SOft coarse grained Monte-Carlo Acceleration (SOMA)" + + my_reference = cript.Reference( + type="journal_article", + title=title, + author=["Ludwig Schneider", "Marcus Müller"], + journal="Computer Physics Communications", + publisher="Elsevier", + year=2019, + pages=[463, 476], + 
doi="10.1016/j.cpc.2018.08.011", + issn="0010-4655", + website="https://www.sciencedirect.com/science/article/pii/S0010465518303072", + ) + + # create citation node and add reference node to it + my_citation = cript.Citation(type="reference", reference=my_reference) + + # add citation subobject to equipment + my_equipment.citation = [my_citation] + ``` + + Returns + ------- + List[Citation] + list of Citation subobjects + """ + return self._json_attrs.citation.copy() + + @citation.setter + @beartype + def citation(self, new_citation: List[Citation]) -> None: + """ + set the citation subobject for this equipment subobject + + Parameters + ---------- + new_citation : List[Citation] + list of Citation subobjects + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, citation=new_citation) + self._update_json_attrs_if_valid(new_attrs) diff --git a/src/cript/nodes/subobjects/ingredient.py b/src/cript/nodes/subobjects/ingredient.py new file mode 100644 index 000000000..43188a461 --- /dev/null +++ b/src/cript/nodes/subobjects/ingredient.py @@ -0,0 +1,222 @@ +from dataclasses import dataclass, field, replace +from typing import List, Optional, Union + +from beartype import beartype + +from cript.nodes.primary_nodes.material import Material +from cript.nodes.subobjects.quantity import Quantity +from cript.nodes.uuid_base import UUIDBaseNode + + +class Ingredient(UUIDBaseNode): + """ + ## Definition + An [Ingredient](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=22) + sub-objects are links to material nodes with the associated quantities. + + --- + + ## Can Be Added To: + * [process](../../primary_nodes/process) + * [computation_process](../../primary_nodes/computation_process) + + ## Available sub-objects: + * [Quantity](../quantity) + + --- + + ## Attributes + + | attribute | type | example | description | required | vocab | + |------------|----------------|----------|------------------------|----------|-------| + | material | Material | | material | True | | + | quantity | list[Quantity] | | quantities | True | | + | keyword | list(str) | catalyst | keyword for ingredient | | True | + + ## JSON Representation + ```json + { + "node":["Ingredient"], + "keyword":["catalyst"], + "uid":"_:32f173ab-a98a-449b-a528-1b656f652dd3", + "uuid":"32f173ab-a98a-449b-a528-1b656f652dd3" + "material":{ + "name":"my material 1", + "node":["Material"], + "bigsmiles":"[H]{[>][<]C(C[>])c1ccccc1[]}", + "uid":"_:029367a8-aee7-493a-bc08-991e0f6939ae", + "uuid":"029367a8-aee7-493a-bc08-991e0f6939ae" + }, + "quantity":[ + { + "node":["Quantity"], + "key":"mass", + "value":11.2 + "uncertainty":0.2, + "uncertainty_type":"stdev", + "unit":"kg", + "uid":"_:c95ee781-923b-4699-ba3b-923ce186ac5d", + "uuid":"c95ee781-923b-4699-ba3b-923ce186ac5d", + } + ] + } + ``` + """ + + @dataclass(frozen=True) + class JsonAttributes(UUIDBaseNode.JsonAttributes): + material: Optional[Material] = None + quantity: List[Quantity] = field(default_factory=list) + keyword: List[str] = field(default_factory=list) + + _json_attrs: JsonAttributes = JsonAttributes() + + @beartype + def __init__(self, material: Material, quantity: List[Quantity], keyword: Optional[List[str]] = None, **kwargs): + """ + create an ingredient sub-object + + Examples + -------- + ```python + import cript + + # create material and identifier for the ingredient sub-object + my_identifiers = [{"bigsmiles": "123456"}] + my_material = cript.Material(name="my material", identifier=my_identifiers) + + # create 
quantity sub-object + my_quantity = cript.Quantity(key="mass", value=11.2, unit="kg", uncertainty=0.2, uncertainty_type="stdev") + + # create ingredient sub-object and add all appropriate nodes/sub-objects + my_ingredient = cript.Ingredient(material=my_material, quantity=my_quantity, keyword="catalyst") + ``` + + Parameters + ---------- + material : Material + material node + quantity : List[Quantity] + list of quantity sub-objects + keyword : List[str], optional + ingredient keyword must come from [CRIPT Controlled Vocabulary](), by default "" + + Returns + ------- + None + Create new Ingredient sub-object + """ + super().__init__(**kwargs) + if keyword is None: + keyword = [] + self._json_attrs = replace(self._json_attrs, material=material, quantity=quantity, keyword=keyword) + self.validate() + + @classmethod + def _from_json(cls, json_dict: dict): + # TODO: remove this temporary fix, once back end is working correctly + if isinstance(json_dict["material"], list): + assert len(json_dict["material"]) == 1 + json_dict["material"] = json_dict["material"][0] + return super(Ingredient, cls)._from_json(json_dict) + + @property + @beartype + def material(self) -> Union[Material, None]: + """ + current material in this ingredient sub-object + + Returns + ------- + Material + Material node within the ingredient sub-object + """ + return self._json_attrs.material + + @property + @beartype + def quantity(self) -> List[Quantity]: + """ + quantity for the ingredient sub-object + + Returns + ------- + List[Quantity] + list of quantities for the ingredient sub-object + """ + return self._json_attrs.quantity.copy() + + @beartype + def set_material(self, new_material: Material, new_quantity: List[Quantity]) -> None: + """ + update ingredient sub-object with new material and new list of quantities + + Examples + -------- + ```python + my_identifiers = [{"bigsmiles": "123456"}] + my_new_material = cript.Material(name="my material", identifier=my_identifiers) + + my_new_quantity = cript.Quantity( + key="mass", value=11.2, unit="kg", uncertainty=0.2, uncertainty_type="stdev" + ) + + # set new material and list of quantities + my_ingredient.set_material(new_material=my_new_material, new_quantity=[my_new_quantity]) + + ``` + + Parameters + ---------- + new_material : Material + new material node to replace the current + new_quantity : List[Quantity] + new list of quantity sub-objects to replace the current quantity subobject on this node + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, material=new_material, quantity=new_quantity) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def keyword(self) -> List[str]: + """ + ingredient keyword must come from the + [CRIPT controlled vocabulary](https://app.criptapp.org/vocab/ingredient_keyword) + + Examples + -------- + ```python + # set new ingredient keyword + my_ingredient.keyword = "computation" + ``` + + Returns + ------- + str + get the current ingredient keyword + """ + return self._json_attrs.keyword.copy() + + @keyword.setter + @beartype + def keyword(self, new_keyword: List[str]) -> None: + """ + set new ingredient keyword to replace the current + + ingredient keyword must come from the [CRIPT controlled vocabulary]() + + Parameters + ---------- + new_keyword : str + new ingredient keyword + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, keyword=new_keyword) + self._update_json_attrs_if_valid(new_attrs) diff --git a/src/cript/nodes/subobjects/parameter.py 
b/src/cript/nodes/subobjects/parameter.py new file mode 100644 index 000000000..55726e7fd --- /dev/null +++ b/src/cript/nodes/subobjects/parameter.py @@ -0,0 +1,221 @@ +from dataclasses import dataclass, replace +from numbers import Number +from typing import Optional, Union + +from beartype import beartype + +from cript.nodes.uuid_base import UUIDBaseNode + + +class Parameter(UUIDBaseNode): + """ + ## Definition + + A [parameter](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=25) + is an input value to an algorithm. + + ??? note "Difference between `Parameter` and `Condition`" + For typical computations, the difference between + parameter and condition lies in whether it changes the thermodynamic state of the simulated + system: Variables that are part of defining a thermodynamic state should be defined as a condition + in a parent node. + + Therefore, `number` and `volume` need to be listed as conditions while + `boundaries` and `origin` are parameters of ensemble size + + --- + ## Can Be Added To: + * [Algorithm sub-object](../algorithm) + + ## Available sub-objects: + * None + + --- + + ## Attributes + + | attribute | type | example | description | required | vocab | + |-----------|------|---------|--------------------|----------|-------| + | key | str | | key for identifier | True | True | + | value | Any | | value | True | | + | unit | str | | unit for parameter | | | + + + ## JSON Representation + ```json + { + "key":"update_frequency", + "node":["Parameter"], + "unit":"1/second", + "value":1000.0 + "uid":"_:6af3b3aa-1dbc-4ce7-be8b-1896b375001c", + "uuid":"6af3b3aa-1dbc-4ce7-be8b-1896b375001c", + } + ``` + """ + + @dataclass(frozen=True) + class JsonAttributes(UUIDBaseNode.JsonAttributes): + key: str = "" + value: Optional[Number] = None + # We explicitly allow None for unit here (instead of empty str), + # this presents number without physical unit, like counting + # particles or dimensionless numbers. + unit: Union[str, None] = None + + _json_attrs: JsonAttributes = JsonAttributes() + + # Note that the key word args are ignored. + # They are just here, such that we can feed more kwargs in that we get from the back end. 
+ @beartype + def __init__(self, key: str, value: Number, unit: Optional[str] = None, **kwargs): + """ + create new Parameter sub-object + + Parameters + ---------- + key : str + Parameter key must come from [CRIPT Controlled Vocabulary]() + value : Union[int, float] + Parameter value + unit : Union[str, None], optional + Parameter unit, by default None + + Examples + -------- + ```python + import cript + + my_parameter = cript.Parameter("update_frequency", 1000.0, "1/second") + ``` + + Returns + ------- + None + create Parameter sub-object + """ + super().__init__(**kwargs) + self._json_attrs = replace(self._json_attrs, key=key, value=value, unit=unit) + self.validate() + + @classmethod + def _from_json(cls, json_dict: dict): + # TODO: remove this temporary fix, once back end is working correctly + try: + json_dict["value"] = float(json_dict["value"]) + except KeyError: + pass + return super(Parameter, cls)._from_json(json_dict) + + @property + @beartype + def key(self) -> str: + """ + Parameter key must come from the [CRIPT Controlled Vocabulary](https://app.criptapp.org/vocab/parameter_key) + + Examples + -------- + ```python + my_parameter.key = "bond_type" + ``` + + Returns + ------- + str + parameter key + """ + return self._json_attrs.key + + @key.setter + @beartype + def key(self, new_key: str) -> None: + """ + set new key for the Parameter sub-object + + Parameter key must come from [CRIPT Controlled Vocabulary]() + + Parameters + ---------- + new_key : str + new Parameter key + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, key=new_key) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def value(self) -> Optional[Number]: + """ + Parameter value + + Examples + -------- + ```python + my_parameter.value = 1 + ``` + + Returns + ------- + Union[int, float, str] + parameter value + """ + return self._json_attrs.value + + @value.setter + @beartype + def value(self, new_value: Number) -> None: + """ + set the Parameter value + + Parameters + ---------- + new_value : Union[int, float, str] + new parameter value + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, value=new_value) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def unit(self) -> Union[str, None]: + """ + Parameter unit + + Examples + -------- + ```python + my_parameter.unit = "gram" + ``` + + Returns + ------- + str + parameter unit + """ + return self._json_attrs.unit + + @unit.setter + @beartype + def unit(self, new_unit: str) -> None: + """ + set the unit attribute for the Parameter sub-object + + Parameters + ---------- + new_unit : str + new Parameter unit + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, unit=new_unit) + self._update_json_attrs_if_valid(new_attrs) diff --git a/src/cript/nodes/subobjects/property.py b/src/cript/nodes/subobjects/property.py new file mode 100644 index 000000000..1da686b54 --- /dev/null +++ b/src/cript/nodes/subobjects/property.py @@ -0,0 +1,753 @@ +from dataclasses import dataclass, field, replace +from numbers import Number +from typing import List, Optional, Union + +from beartype import beartype + +from cript.nodes.primary_nodes.computation import Computation +from cript.nodes.primary_nodes.data import Data +from cript.nodes.primary_nodes.material import Material +from cript.nodes.primary_nodes.process import Process +from cript.nodes.subobjects.citation import Citation +from cript.nodes.subobjects.condition import Condition +from cript.nodes.uuid_base import 
UUIDBaseNode + + +class Property(UUIDBaseNode): + """ + ## Definition + [Property](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=18) sub-objects + are qualities/traits of a [material](../../primary_nodes/material) or or [Process](../../primary_nodes/process) + + --- + + ## Can Be Added To: + * [Material](../../primary_nodes/material) + * [Process](../../primary_nodes/process) + * [Computation_Process](../../primary_nodes/computation_process) + + ## Available sub-objects: + * [Condition](../condition) + * [Citation](../citation) + + --- + + ## Attributes + + | attribute | type | example | description | required | vocab | + |--------------------|-------------------|------------------------------------------------------------------------|------------------------------------------------------------------------------|----------|-------| + | key | str | modulus_shear | type of property | True | True | + | type | str | min | type of value stored | True | True | + | value | Any | 1.23 | value or quantity | True | | + | unit | str | gram | unit for value | True | | + | uncertainty | Number | 0.1 | uncertainty of value | | | + | uncertainty_type | str | standard_deviation | type of uncertainty | | True | + | component | list[Material] | | material that the property relates to** | | | + | structure | str | {\[\]\[$\]\[C:1\]\[C:1\]\[$\], \[$\]\[C:2\]\[C:2\](\[C:2\]) \[$\]\[\]} | specific chemical structure associate with the property with atom mappings** | | | + | method | str | sec | approach or source of property data | | True | + | sample_preparation | Process | | sample preparation | | | + | condition | list[Condition] | | conditions under which the property was measured | | | + | data | Data | | data node | | | + | computation | list[Computation] | | computation method that produced property | | | + | citation | list[Citation] | | reference to a book, paper, or scholarly work | | | + | notes | str | | miscellaneous information, or custom data structure (e.g.; JSON) | | | + + + ## JSON Representation + ```json + { + "key":"modulus_shear", + "node":["Property"], + "type":"value", + "unit":"GPa", + "value":5.0 + "uid":"_:bc3abb68-25b5-4144-aa1b-85d82b7c77e1", + "uuid":"bc3abb68-25b5-4144-aa1b-85d82b7c77e1", + } + ``` + """ + + @dataclass(frozen=True) + class JsonAttributes(UUIDBaseNode.JsonAttributes): + key: str = "" + type: str = "" + value: Union[Number, str, None] = None + unit: str = "" + uncertainty: Optional[Number] = None + uncertainty_type: str = "" + component: List[Material] = field(default_factory=list) + structure: str = "" + method: str = "" + sample_preparation: Optional[Process] = None + condition: List[Condition] = field(default_factory=list) + data: List[Data] = field(default_factory=list) + computation: List[Computation] = field(default_factory=list) + citation: List[Citation] = field(default_factory=list) + notes: str = "" + + _json_attrs: JsonAttributes = JsonAttributes() + + @beartype + def __init__( + self, + key: str, + type: str, + value: Union[Number, str, None], + unit: Union[str, None], + uncertainty: Optional[Number] = None, + uncertainty_type: str = "", + component: Optional[List[Material]] = None, + structure: str = "", + method: str = "", + sample_preparation: Optional[Process] = None, + condition: Optional[List[Condition]] = None, + data: Optional[List[Data]] = None, + computation: Optional[List[Computation]] = None, + citation: Optional[List[Citation]] = None, + notes: str = "", + **kwargs + ): + """ + create a 
property sub-object + + Parameters + ---------- + key : str + type of property, Property key must come from the [CRIPT Controlled Vocabulary]() + type : str + type of value stored, Property type must come from the [CRIPT Controlled Vocabulary]() + value : Union[Number, None] + value or quantity + unit : str + unit for value + uncertainty : Union[Number, None], optional + uncertainty value of the value, by default None + uncertainty_type : str, optional + type of uncertainty, by default "" + component : Union[List[Material], None], optional + List of Material nodes, by default None + structure : str, optional + specific chemical structure associate with the property with atom mappings**, by default "" + method : str, optional + approach or source of property data, by default "" + sample_preparation : Union[Process, None], optional + sample preparation, by default None + condition : Union[List[Condition], None], optional + conditions under which the property was measured, by default None + data : Union[List[Data], None], optional + Data node, by default None + computation : Union[List[Computation], None], optional + computation method that produced property, by default None + citation : Union[List[Citation], None], optional + reference scholarly work, by default None + notes : str, optional + miscellaneous information, or custom data structure (e.g.; JSON), by default "" + + + Examples + -------- + ```python + import cript + + my_property = cript.Property(key="air_flow", type="min", value=1.00, unit="gram") + ``` + + Returns + ------- + None + create a Property sub-object + """ + if component is None: + component = [] + if condition is None: + condition = [] + if computation is None: + computation = [] + if data is None: + data = [] + if citation is None: + citation = [] + + super().__init__(**kwargs) + self._json_attrs = replace( + self._json_attrs, + key=key, + type=type, + value=value, + unit=unit, + uncertainty=uncertainty, + uncertainty_type=uncertainty_type, + component=component, + structure=structure, + method=method, + sample_preparation=sample_preparation, + condition=condition, + data=data, + computation=computation, + citation=citation, + notes=notes, + ) + self.validate() + + @property + @beartype + def key(self) -> str: + """ + Property key must come from [CRIPT Controlled Vocabulary](https://app.criptapp.org/vocab/) + + Examples + -------- + ```python + my_parameter.key = "angle_rdist" + ``` + + Returns + ------- + str + Property Key + """ + return self._json_attrs.key + + @key.setter + @beartype + def key(self, new_key: str) -> None: + """ + set the key for this Property sub-object + + Parameters + ---------- + new_key : str + new Property key + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, key=new_key) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def type(self) -> str: + """ + type of value for this Property sub-object + + [property type](https://app.criptapp.org/vocab/) must come from CRIPT controlled vocabulary + + Examples + ```python + my_property.type = "max" + ``` + + Returns + ------- + str + type of value for this Property sub-object + """ + return self._json_attrs.type + + @type.setter + @beartype + def type(self, new_type: str) -> None: + """ + set the Property type for this subobject + + Parameters + ---------- + new_type : str + new Property type + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, type=new_type) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + 
def value(self) -> Union[Number, str, None]: + """ + get the Property value + + Returns + ------- + Union[Number, None] + Property value + """ + return self._json_attrs.value + + @beartype + def set_value(self, new_value: Union[Number, str], new_unit: str) -> None: + """ + set the value attribute of the Property subobject + + Examples + --------- + ```python + my_property.set_value(new_value=1, new_unit="gram") + ``` + + Parameters + ---------- + new_value : Number + new value + new_unit : str + new unit for the value + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, value=new_value, unit=new_unit) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def unit(self) -> str: + """ + get the Property unit for the value + + Returns + ------- + str + unit + """ + return self._json_attrs.unit + + @property + @beartype + def uncertainty(self) -> Union[Number, None]: + """ + get the uncertainty value of the Property node + + Returns + ------- + Union[Number, None] + uncertainty value + """ + return self._json_attrs.uncertainty + + @beartype + def set_uncertainty(self, new_uncertainty: Number, new_uncertainty_type: str) -> None: + """ + set the uncertainty value and type + + Uncertainty type must come from [CRIPT Controlled Vocabulary] + + Parameters + ---------- + new_uncertainty : Number + new uncertainty value + new_uncertainty_type : str + new uncertainty type + + Examples + -------- + ```python + my_property.set_uncertainty(new_uncertainty=2, new_uncertainty_type="fwhm") + ``` + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, uncertainty=new_uncertainty, uncertainty_type=new_uncertainty_type) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def uncertainty_type(self) -> str: + """ + get the uncertainty_type for this Property subobject + + [Uncertainty type](https://app.criptapp.org/vocab/uncertainty_type) + must come from CRIPT Controlled Vocabulary + + Returns + ------- + str + Uncertainty type + """ + return self._json_attrs.uncertainty_type + + @property + @beartype + def component(self) -> List[Material]: + """ + list of Materials that the Property relates to + + Examples + --------- + ```python + + my_identifiers = [{"bigsmiles": "123456"}] + my_material = cript.Material(name="my material", identifier=my_identifiers) + + # add material node as component to Property subobject + my_property.component = my_material + ``` + + Returns + ------- + List[Material] + list of Materials that the Property relates to + """ + return self._json_attrs.component.copy() + + @component.setter + @beartype + def component(self, new_component: List[Material]) -> None: + """ + set the list of Materials as components for the Property subobject + + Parameters + ---------- + new_component : List[Material] + new list of Materials to for the Property subobject + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, component=new_component) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def structure(self) -> str: + """ + specific chemical structure associate with the property with atom mappings + + Examples + -------- + ```python + my_property.structure = "{[][$][C:1][C:1][$],[$][C:2][C:2]([C:2])[$][]}" + ``` + + Returns + ------- + str + Property structure string + """ + return self._json_attrs.structure + + @structure.setter + @beartype + def structure(self, new_structure: str) -> None: + """ + set the this Property's structure + + Parameters + ---------- + new_structure : 
str + new structure + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, structure=new_structure) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def method(self) -> str: + """ + approach or source of property data True sample_preparation Process sample preparation + + [Property method](https://app.criptapp.org/vocab/property_method) must come from CRIPT Controlled Vocabulary + + Examples + -------- + ```python + my_property.method = "ASTM_D3574_Test_A" + ``` + + Returns + ------- + str + Property method + """ + return self._json_attrs.method + + @method.setter + @beartype + def method(self, new_method: str) -> None: + """ + set the Property method + + Property method must come from [CRIPT Controlled Vocabulary]() + + Parameters + ---------- + new_method : str + new Property method + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, method=new_method) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def sample_preparation(self) -> Union[Process, None]: + """ + sample_preparation + + Examples + -------- + ```python + my_process = cript.Process(name="my process name", type="affinity_pure") + + my_property.sample_preparation = my_process + ``` + + Returns + ------- + Union[Process, None] + Property linking back to the Process that has it as subobject + """ + return self._json_attrs.sample_preparation + + @sample_preparation.setter + @beartype + def sample_preparation(self, new_sample_preparation: Union[Process, None]) -> None: + """ + set the sample_preparation for the Property subobject + + Parameters + ---------- + new_sample_preparation : Union[Process, None] + back link to the Process that has this Property as its subobject + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, sample_preparation=new_sample_preparation) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def condition(self) -> List[Condition]: + """ + list of Conditions under which the property was measured + + Examples + -------- + ```python + my_condition = cript.Condition(key="atm", type="max", value=1) + + my_property.condition = [my_condition] + ``` + + Returns + ------- + List[Condition] + list of Conditions + """ + return self._json_attrs.condition.copy() + + @condition.setter + @beartype + def condition(self, new_condition: List[Condition]) -> None: + """ + set the list of Conditions for this property subobject + + Parameters + ---------- + new_condition : List[Condition] + new list of Condition Subobjects + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, condition=new_condition) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def data(self) -> List[Data]: + """ + List of Data nodes for this Property subobjects + + Examples + -------- + ```python + # create file node for the Data node + my_file = cript.File( + source="https://criptapp.org", + type="calibration", + extension=".csv", + data_dictionary="my file's data dictionary", + ) + + # create data node for the property subobject + my_data = cript.Data(name="my data name", type="afm_amp", file=[my_file]) + + # add data node to Property subobject + my_property.data = my_data + ``` + + Returns + ------- + List[Data] + list of Data nodes + """ + return self._json_attrs.data.copy() + + @data.setter + @beartype + def data(self, new_data: List[Data]) -> None: + """ + set the Data node for the Property subobject + + Parameters + ---------- + new_data : List[Data] + new list of Data 
nodes + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, data=new_data) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def computation(self) -> List[Computation]: + """ + list of Computation nodes that produced this property + + Examples + -------- + ```python + my_computation = cript.Computation(name="my computation name", type="analysis") + + my_property.computation = [my_computation] + ``` + + Returns + ------- + List[Computation] + list of Computation nodes + """ + return self._json_attrs.computation.copy() + + @computation.setter + @beartype + def computation(self, new_computation: List[Computation]) -> None: + """ + set the list of Computation nodes that produced this property + + Parameters + ---------- + new_computation : List[Computation] + new list of Computation nodes + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, computation=new_computation) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def citation(self) -> List[Citation]: + """ + list of Citation subobjects for this Property subobject + + Examples + -------- + ```python + # create reference node for the citation node + title = "Multi-architecture Monte-Carlo (MC) simulation of soft coarse-grained polymeric materials: " + title += "Soft coarse grained Monte-Carlo Acceleration (SOMA)" + + my_reference = cript.Reference( + type="journal_article", + title=title, + author=["Ludwig Schneider", "Marcus Müller"], + journal="Computer Physics Communications", + publisher="Elsevier", + year=2019, + pages=[463, 476], + doi="10.1016/j.cpc.2018.08.011", + issn="0010-4655", + website="https://www.sciencedirect.com/science/article/pii/S0010465518303072", + ) + + # create citation node and add reference node to it + my_citation = cript.Citation(type="reference", reference=my_reference) + + # add citation to Property subobject + my_property.citation = [my_citation] + ``` + + Returns + ------- + List[Citation] + list of Citation subobjects for this Property subobject + """ + return self._json_attrs.citation.copy() + + @citation.setter + @beartype + def citation(self, new_citation: List[Citation]) -> None: + """ + set the list of Citation subobjects for the Property subobject + + Parameters + ---------- + new_citation : List[Citation] + new list of Citation subobjects + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, citation=new_citation) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def notes(self) -> str: + """ + notes for this Property subobject + + Examples + -------- + ```python + my_property.notes = "these are my notes" + ``` + + Returns + ------- + str + notes for this property subobject + """ + return self._json_attrs.notes + + @notes.setter + @beartype + def notes(self, new_notes: str) -> None: + """ + set the notes for this Property subobject + + Parameters + ---------- + new_notes : str + new notes + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, notes=new_notes) + self._update_json_attrs_if_valid(new_attrs) diff --git a/src/cript/nodes/subobjects/quantity.py b/src/cript/nodes/subobjects/quantity.py new file mode 100644 index 000000000..ae24ea464 --- /dev/null +++ b/src/cript/nodes/subobjects/quantity.py @@ -0,0 +1,258 @@ +from dataclasses import dataclass, replace +from numbers import Number +from typing import Optional, Union + +from beartype import beartype + +from cript.nodes.uuid_base import UUIDBaseNode + + +class Quantity(UUIDBaseNode): + """ + 
## Definition + The [Quantity](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=22) + sub-objects are the amount of material involved in a process + + --- + + ## Can Be Added To: + * [Ingredient](../ingredient) + + ## Available sub-objects + * None + + ---- + + ## Attributes + + | attribute | type | example | description | required | vocab | + |------------------|---------|---------|----------------------|----------|-------| + | key | str | mass | type of quantity | True | True | + | value | Any | 1.23 | amount of material | True | | + | unit | str | gram | unit for quantity | True | | + | uncertainty | Number | 0.1 | uncertainty of value | | | + | uncertainty_type | str | std | type of uncertainty | | True | + + + + + ## JSON Representation + ```json + { + "node":["Quantity"], + "key":"mass", + "value":11.2 + "uncertainty":0.2, + "uncertainty_type":"stdev", + "unit":"kg", + "uid":"_:c95ee781-923b-4699-ba3b-923ce186ac5d", + "uuid":"c95ee781-923b-4699-ba3b-923ce186ac5d", + } + ``` + """ + + @dataclass(frozen=True) + class JsonAttributes(UUIDBaseNode.JsonAttributes): + key: str = "" + value: Optional[Number] = None + unit: str = "" + uncertainty: Optional[Number] = None + uncertainty_type: str = "" + + _json_attrs: JsonAttributes = JsonAttributes() + + @beartype + def __init__(self, key: str, value: Number, unit: str, uncertainty: Optional[Number] = None, uncertainty_type: str = "", **kwargs): + """ + create Quantity sub-object + + Parameters + ---------- + key : str + type of quantity. Quantity key must come from [CRIPT Controlled Vocabulary]() + value : Number + amount of material + unit : str + unit for quantity + uncertainty : Union[Number, None], optional + uncertainty of value, by default None + uncertainty_type : str, optional + type of uncertainty. Quantity uncertainty type must come from [CRIPT Controlled Vocabulary](), by default "" + + Examples + -------- + ```python + import cript + + my_quantity = cript.Quantity( + key="mass", value=11.2, unit="kg", uncertainty=0.2, uncertainty_type="stdev" + ) + ``` + + Returns + ------- + None + create Quantity sub-object + """ + super().__init__(**kwargs) + self._json_attrs = replace(self._json_attrs, key=key, value=value, unit=unit, uncertainty=uncertainty, uncertainty_type=uncertainty_type) + self.validate() + + @classmethod + def _from_json(cls, json_dict: dict): + # TODO: remove this temporary fix, once back end is working correctly + for key in ["value", "uncertainty"]: + try: + json_dict[key] = float(json_dict[key]) + except KeyError: + pass + return super(Quantity, cls)._from_json(json_dict) + + @beartype + def set_key_unit(self, new_key: str, new_unit: str) -> None: + """ + set the Quantity key and unit attributes + + Quantity key must come from [CRIPT Controlled Vocabulary]() + + Examples + -------- + ```python + my_quantity.set_key_unit(new_key="mass", new_unit="gram") + ``` + + Parameters + ---------- + new_key : str + new Quantity key. 
Quantity key must come from [CRIPT Controlled Vocabulary]() + new_unit : str + new unit + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, key=new_key, unit=new_unit) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def key(self) -> str: + """ + get the Quantity sub-object key attribute + + [Quantity type](https://app.criptapp.org/vocab/quantity_key) must come from CRIPT controlled vocabulary + + Returns + ------- + str + this Quantity key attribute + """ + return self._json_attrs.key + + @property + @beartype + def value(self) -> Union[int, float, str]: + """ + amount of Material + + Examples + -------- + ```python + my_quantity.value = 1 + ``` + + Returns + ------- + Union[int, float, str] + amount of Material + """ + return self._json_attrs.value # type: ignore + + @value.setter + @beartype + def value(self, new_value: Union[int, float, str]) -> None: + """ + set the amount of Material + + Parameters + ---------- + new_value : Union[int, float, str] + amount of Material + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, value=new_value) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def unit(self) -> str: + """ + get the Quantity unit attribute + + Returns + ------- + str + unit for the Quantity value attribute + """ + return self._json_attrs.unit + + @property + @beartype + def uncertainty(self) -> Optional[Number]: + """ + get the uncertainty value + + Returns + ------- + Number + uncertainty value + """ + return self._json_attrs.uncertainty # type: ignore + + @property + @beartype + def uncertainty_type(self) -> str: + """ + get the uncertainty type attribute for the Quantity sub-object + + [Uncertainty type](https://app.criptapp.org/vocab/uncertainty_type) must come from CRIPT controlled vocabulary + + Returns + ------- + str + uncertainty type + """ + return self._json_attrs.uncertainty_type + + @beartype + def set_uncertainty(self, uncertainty: Number, type: str) -> None: + """ + set the `uncertainty value` and `uncertainty_type` + + Uncertainty and uncertainty type are set at the same time to keep the value and type in sync + + `uncertainty_type` must come from [CRIPT Controlled Vocabulary]() + + Examples + -------- + ```python + my_property.set_uncertainty(uncertainty=1, type="stderr") + ``` + + Parameters + ---------- + uncertainty : Number + uncertainty value + type : str + type of uncertainty, uncertainty_type must come from [CRIPT Controlled Vocabulary]() + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, uncertainty=uncertainty, uncertainty_type=type) + self._update_json_attrs_if_valid(new_attrs) diff --git a/src/cript/nodes/subobjects/software.py b/src/cript/nodes/subobjects/software.py new file mode 100644 index 000000000..4ee60ad35 --- /dev/null +++ b/src/cript/nodes/subobjects/software.py @@ -0,0 +1,194 @@ +from dataclasses import dataclass, replace + +from beartype import beartype + +from cript.nodes.uuid_base import UUIDBaseNode + + +class Software(UUIDBaseNode): + """ + ## Definition + + The [Software](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=16) + node contains metadata for a computation tool, code, programming language, or software package. + + Similar to the [reference](../../primary_nodes/reference) node, the software node does not contain the base + attributes and is meant to always be public and static. 
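+ + A short usage sketch (mirroring the `SoftwareConfiguration` example below; the name, version, and source values are illustrative): + + ```python + my_software = cript.Software(name="LAMMPS", version="23Jun22", source="lammps.org") + + # a Software node is typically attached to a SoftwareConfiguration sub-object + my_software_configuration = cript.SoftwareConfiguration(software=my_software) + ```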
+ + --- + + ## Can Be Added To: + * [Software_Configuration](../../subobjects/software_configuration) + + ## Available sub-objects + * None + + --- + + ## Attributes + + | attribute | type | example | description | required | vocab | + |-----------|------|------------|-------------------------------|----------|-------| + | name | str | LAMMPS | type of literature | True | | + | version | str | 23Jun22 | software version | True | | + | source | str | lammps.org | source of software | | | + + ## JSON Representation + ```json + { + "name":"SOMA", + "node":["Software"], + "version":"0.7.0" + "source":"https://gitlab.com/InnocentBug/SOMA", + "uid":"_:f2ec4bf2-96aa-48a3-bfbc-d1d3f090583b", + "uuid":"f2ec4bf2-96aa-48a3-bfbc-d1d3f090583b", + } + ``` + """ + + @dataclass(frozen=True) + class JsonAttributes(UUIDBaseNode.JsonAttributes): + name: str = "" + version: str = "" + source: str = "" + + _json_attrs: JsonAttributes = JsonAttributes() + + @beartype + def __init__(self, name: str, version: str, source: str = "", **kwargs): + """ + create Software node + + Parameters + ---------- + name : str + Software name + version : str + Software version + source : str, optional + Software source, by default "" + + Examples + -------- + ```python + my_software = cript.Software( + name="my software name", version="v1.0.0", source="https://myurl.com" + ) + ``` + + Returns + ------- + None + create Software node + """ + super().__init__(**kwargs) + + self._json_attrs = replace(self._json_attrs, name=name, version=version, source=source) + self.validate() + + @property + @beartype + def name(self) -> str: + """ + Software name + + Examples + -------- + ```python + my_software.name = "my software name" + ``` + + Returns + ------- + str + Software name + """ + return self._json_attrs.name + + @name.setter + @beartype + def name(self, new_name: str) -> None: + """ + set the name of the Software node + + Parameters + ---------- + new_name : str + new Software name + + Returns + ------- + None + """ + new_attr = replace(self._json_attrs, name=new_name) + self._update_json_attrs_if_valid(new_attr) + + @property + @beartype + def version(self) -> str: + """ + Software version + + my_software.version = "1.2.3" + + Returns + ------- + str + Software version + """ + return self._json_attrs.version + + @version.setter + @beartype + def version(self, new_version: str) -> None: + """ + set the Software version + + Parameters + ---------- + new_version : str + new Software version + + Returns + ------- + None + """ + new_attr = replace(self._json_attrs, version=new_version) + self._update_json_attrs_if_valid(new_attr) + + @property + @beartype + def source(self) -> str: + """ + Software source + + Examples + -------- + ```python + my_software.source = "https://mywebsite.com" + ``` + + Returns + ------- + str + Software source + """ + return self._json_attrs.source + + @source.setter + @beartype + def source(self, new_source: str) -> None: + """ + set the Software source + + Parameters + ---------- + new_source : str + new Software source + + Returns + ------- + None + """ + new_attr = replace(self._json_attrs, source=new_source) + self._update_json_attrs_if_valid(new_attr) diff --git a/src/cript/nodes/subobjects/software_configuration.py b/src/cript/nodes/subobjects/software_configuration.py new file mode 100644 index 000000000..8e727f83a --- /dev/null +++ b/src/cript/nodes/subobjects/software_configuration.py @@ -0,0 +1,286 @@ +from dataclasses import dataclass, field, replace +from typing import List, Optional, Union + 
+from beartype import beartype + +from cript.nodes.subobjects.algorithm import Algorithm +from cript.nodes.subobjects.citation import Citation +from cript.nodes.subobjects.software import Software +from cript.nodes.uuid_base import UUIDBaseNode + + +class SoftwareConfiguration(UUIDBaseNode): + """ + ## Definition + + The [software_configuration](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=24) + sub-object includes software and the set of algorithms to execute computation or computational_process. + + --- + + ## Can Be Added To: + * [Computation](../../primary_nodes/computation) + * [Computation_Process](../../primary_nodes/computation_process) + + ## Available sub-objects: + * [Algorithm](../algorithm) + * [Citation](../citation) + + --- + + ## Attributes + + | keys | type | example | description | required | vocab | + |--------------------------------------------------|-----------------|---------|------------------------------------------------------------------|----------|-------| + | software | Software | | software used | True | | + | algorithms | list[Algorithm] | | algorithms used | | | + | notes | str | | miscellaneous information, or custom data structure (e.g.; JSON) | | | + | citation | list[Citation] | | reference to a book, paper, or scholarly work | | | + + + ## JSON Representation + ```json + { + "node":["SoftwareConfiguration"], + "uid":"_:f0dc3415-635d-4590-8b1f-cd65ad8ab3fe" + "software":{ + "name":"SOMA", + "node":["Software"], + "source":"https://gitlab.com/InnocentBug/SOMA", + "uid":"_:5bf9cb33-f029-4d1b-ba53-3602036e4f75", + "uuid":"5bf9cb33-f029-4d1b-ba53-3602036e4f75", + "version":"0.7.0" + } + } + ``` + """ + + @dataclass(frozen=True) + class JsonAttributes(UUIDBaseNode.JsonAttributes): + software: Union[Software, None] = None + algorithm: List[Algorithm] = field(default_factory=list) + notes: str = "" + citation: List[Citation] = field(default_factory=list) + + _json_attrs: JsonAttributes = JsonAttributes() + + @beartype + def __init__(self, software: Software, algorithm: Optional[List[Algorithm]] = None, notes: str = "", citation: Union[List[Citation], None] = None, **kwargs): + """ + Create Software_Configuration sub-object + + + Parameters + ---------- + software : Software + Software node used for the Software_Configuration + algorithm : Union[List[Algorithm], None], optional + algorithm used for the Software_Configuration, by default None + notes : str, optional + plain text notes, by default "" + citation : Union[List[Citation], None], optional + list of Citation sub-object, by default None + + Examples + --------- + ```python + import cript + + my_software = cript.Software(name="LAMMPS", version="23Jun22", source="lammps.org") + + my_software_configuration = cript.SoftwareConfiguration(software=my_software) + ``` + + Returns + ------- + None + Create Software_Configuration sub-object + """ + if algorithm is None: + algorithm = [] + if citation is None: + citation = [] + super().__init__(**kwargs) + self._json_attrs = replace(self._json_attrs, software=software, algorithm=algorithm, notes=notes, citation=citation) + self.validate() + + @property + @beartype + def software(self) -> Union[Software, None]: + """ + Software used + + Examples + -------- + ```python + my_software = cript.Software( + name="my software name", version="v1.0.0", source="https://myurl.com" + ) + + my_software_configuration.software = my_software + ``` + + Returns + ------- + Union[Software, None] + Software node used + """ + return 
self._json_attrs.software + + @software.setter + @beartype + def software(self, new_software: Union[Software, None]) -> None: + """ + set the Software used + + Parameters + ---------- + new_software : Union[Software, None] + new Software node + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, software=new_software) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def algorithm(self) -> List[Algorithm]: + """ + list of Algorithms used + + Examples + -------- + ```python + my_algorithm = cript.Algorithm(key="mc_barostat", type="barostat") + + my_software_configuration.algorithm = [my_algorithm] + ``` + + Returns + ------- + List[Algorithm] + list of algorithms used + """ + return self._json_attrs.algorithm.copy() + + @algorithm.setter + @beartype + def algorithm(self, new_algorithm: List[Algorithm]) -> None: + """ + set the list of Algorithms + + Parameters + ---------- + new_algorithm : List[Algorithm] + list of algorithms + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, algorithm=new_algorithm) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def notes(self) -> str: + """ + miscellaneous information, or custom data structure (e.g.; JSON). Notes can be written in plain text or JSON + + Examples + -------- + ### Plain Text + ```json + my_software_configuration.notes = "these are my awesome notes!" + ``` + + ### JSON Notes + ```python + my_software_configuration.notes = "{'notes subject': 'notes contents'}" + ``` + + Returns + ------- + str + notes + """ + return self._json_attrs.notes + + @notes.setter + @beartype + def notes(self, new_notes: str) -> None: + """ + set notes for Software_configuration + + Parameters + ---------- + new_notes : str + new notes in plain text or JSON + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, notes=new_notes) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def citation(self) -> List[Citation]: + """ + list of Citation sub-objects for the Software_Configuration + + Examples + -------- + ```python + title = "Multi-architecture Monte-Carlo (MC) simulation of soft coarse-grained polymeric materials: " + title += "SOft coarse grained Monte-Carlo Acceleration (SOMA)" + + # create reference node + my_reference = cript.Reference( + type"journal_article", + title=title, + author=["Ludwig Schneider", "Marcus Müller"], + journal="Computer Physics Communications", + publisher="Elsevier", + year=2019, + pages=[463, 476], + doi="10.1016/j.cpc.2018.08.011", + issn="0010-4655", + website="https://www.sciencedirect.com/science/article/pii/S0010465518303072", + ) + + # create citation sub-object and add reference to it + my_citation = Citation("reference", my_reference) + + # add citation to algorithm node + my_software_configuration.citation = [my_citation] + ``` + + Returns + ------- + List[Citation] + list of Citations + """ + return self._json_attrs.citation.copy() + + @citation.setter + @beartype + def citation(self, new_citation: List[Citation]) -> None: + """ + set the Citation sub-object + + Parameters + ---------- + new_citation : List[Citation] + new list of Citation sub-objects + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, citation=new_citation) + self._update_json_attrs_if_valid(new_attrs) diff --git a/src/cript/nodes/supporting_nodes/__init__.py b/src/cript/nodes/supporting_nodes/__init__.py new file mode 100644 index 000000000..dc07c7eef --- /dev/null +++ 
b/src/cript/nodes/supporting_nodes/__init__.py @@ -0,0 +1,3 @@ +# trunk-ignore-all(ruff/F401) +from cript.nodes.supporting_nodes.file import File +from cript.nodes.supporting_nodes.user import User diff --git a/src/cript/nodes/supporting_nodes/file.py b/src/cript/nodes/supporting_nodes/file.py new file mode 100644 index 000000000..d540b8a9b --- /dev/null +++ b/src/cript/nodes/supporting_nodes/file.py @@ -0,0 +1,446 @@ +import os +from dataclasses import dataclass, replace +from pathlib import Path +from typing import Union + +from beartype import beartype + +from cript.nodes.primary_nodes.primary_base_node import PrimaryBaseNode + + +def _is_local_file(file_source: str) -> bool: + """ + Determines if the file the user is uploading is a local file or a link. + + It basically tests if the path exists, and it is specifically a file + on the local storage and not just a valid directory + + Notes + ----- + since checking for URL is very easy because it has to start with HTTP it checks that as well + if it starts with http then it makes the work easy, and it is automatically web URL + + Parameters + ---------- + file_source: str + The source of the file. + + Returns + ------- + bool + True if the file is local, False if it's a link or s3 object_name. + """ + + # convert local or relative file path str into a path object and resolve it to always get an absolute path + file_source_abs_path: str = str(Path(file_source).resolve()) + + # if it doesn't start with HTTP and exists on disk + # checking "http" so it works with both "https://" and "http://" + if not file_source.startswith("http") and os.path.isfile(file_source_abs_path): + return True + + else: + return False + + +def _upload_file_and_get_object_name(source: Union[str, Path], api=None) -> str: + """ + uploads file to cloud storage and returns the file link + + 1. checks if the source is a local file path and not a web url + 1. if it is a local file path, then it uploads it to cloud storage + * returns the file link in cloud storage + 1. else it returns the same file link because it is already on the web + + Parameters + ---------- + source: str + file source can be a relative or absolute file string or pathlib object + + Returns + ------- + str + file AWS S3 link + """ + from cript.api.api import _get_global_cached_api + + # convert source to str for `_is_local_file` and to return str + source = str(source) + + if _is_local_file(file_source=source): + if api is None: + api = _get_global_cached_api() + object_name = api.upload_file(file_path=source) + # always getting a string for object_name + source = str(object_name) + + # always returning a string + return source + + +class File(PrimaryBaseNode): + """ + ## Definition + + The [File node](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001 + .pdf#page=28) provides a link to scholarly work and allows users to specify in what way the work relates to that + data. More specifically, users can specify that the data was directly extracted from, inspired by, derived from, + etc. + + The file node is held in the [Data node](../../primary_nodes/data). 
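+ + A short sketch of the two kinds of sources a File node accepts (based on the examples further below; the names and paths are illustrative, and the upload step assumes an active `cript.API` connection): + + ```python + # a web URL source is stored as-is + my_web_file = cript.File(name="my web file", source="https://criptapp.org", type="calibration") + + # a local path is uploaded to cloud storage when `ensure_uploaded()` is called or the project is saved + my_local_file = cript.File(name="my local file", source="/local/path/to/file", type="calibration") + my_local_file.ensure_uploaded() + my_local_file.source  # now starts with "http" + ```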
+ + ## Attributes + + | Attribute | Type | Example | Description | Required | + |-----------------|------|-------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------|----------| + | source | str | `"path/to/my/file"` or `"https://en.wikipedia.org/wiki/Simplified_molecular-input_line-entry_system"` | source to the file can be URL or local path | True | + | type | str | `"logs"` | Pick from [CRIPT File Types](https://app.criptapp.org/vocab/file-type/) | True | + | extension | str | `".csv"` | file extension | False | + | data_dictionary | str | `"my extra info in my data dictionary"` | set of information describing the contents, format, and structure of a file | False | + + ## JSON + ``` json + { + "node": ["File"], + "source": "https://criptapp.org", + "type": "calibration", + "extension": ".csv", + "data_dictionary": "my file's data dictionary", + } + ``` + + """ + + @dataclass(frozen=True) + class JsonAttributes(PrimaryBaseNode.JsonAttributes): + """ + all file attributes + """ + + source: str = "" + type: str = "" + extension: str = "" + data_dictionary: str = "" + + _json_attrs: JsonAttributes = JsonAttributes() + + @beartype + def __init__(self, name: str, source: str, type: str, extension: str = "", data_dictionary: str = "", notes: str = "", **kwargs): + """ + create a File node + + Parameters + ---------- + name: str + File node name + source: str + link or path to local file + type: str + Pick a file type from CRIPT controlled vocabulary [File types]() + extension:str + file extension + data_dictionary:str + extra information describing the file + notes: str + notes for the file node + **kwargs:dict + for internal use. Any extra data needed to create this file node + when deserializing the JSON response from the API + + Examples + -------- + ??? Example "Minimal File Node" + ```python + my_file = cript.File( + source="https://criptapp.org", + type="calibration", + ) + ``` + + ??? Example "Maximal File Node" + ```python + my_file = cript.File( + source="https://criptapp.org", + type="calibration", + extension=".csv", + data_dictionary="my file's data dictionary" + notes="my notes for this file" + ) + ``` + """ + + super().__init__(name=name, notes=notes, **kwargs) + + # TODO check if vocabulary is valid or not + # is_vocab_valid("file type", type) + + # setting every attribute except for source, which will be handled via setter + self._json_attrs = replace( + self._json_attrs, + type=type, + # always giving the function the required str regardless if the input `Path` or `str` + source=str(source), + extension=extension, + data_dictionary=data_dictionary, + ) + + self.validate() + + def ensure_uploaded(self, api=None): + """ + Ensure that a local file is being uploaded into CRIPT accessible cloud storage. + After this call, non-local files (file names that do not start with `http`) are uploaded. + It is not necessary to call this function manually. + A saved project automatically ensures uploaded files, it is recommend to rely on the automatic upload. + + Parameters: + ----------- + + api: cript.API, optional + API object that performs the upload. + If None, the globally cached object is being used. + + Examples + -------- + ??? 
Example "Minimal File Node" + ```python + my_file = cript.File(source="/local/path/to/file", type="calibration") + my_file.ensure_uploaded() + my_file.source # Starts with http now + ``` + + """ + + if _is_local_file(file_source=self.source): + # upload file source if local file + self.source = _upload_file_and_get_object_name(source=self.source) + + # TODO can be made into a function + + # --------------- Properties --------------- + @property + @beartype + def source(self) -> str: + """ + The File node source can be set to be either a path to a local file on disk + or a URL path to a file on the web. + + Example + -------- + URL File Source + ```python + my_file.source = "https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf" + ``` + Local File Path + ```python + my_file.source = "/home/user/project/my_file.csv" + ``` + + Returns + ------- + source: str + A string representing the file source. + """ + return self._json_attrs.source + + @source.setter + @beartype + def source(self, new_source: str) -> None: + """ + sets the source of the file node + the source can either be a path to a file on local storage or a link to a file + + 1. checks if the file source is a link or a local file path + 2. if the source is a link such as `https://wikipedia.com` then it sets the URL as the file source + 3. if the file source is a local file path such as + `C:\\Users\\my_username\\Desktop\\cript\\file.txt` + 1. then it opens the file and reads it + 2. uploads it to the cloud storage + 3. gets back a URL from where in the cloud the file is found + 4. sets that as the source + + Parameters + ---------- + new_source: str + + Example + ------- + ```python + my_file.source = "https://pubs.acs.org/doi/10.1021/acscentsci.3c00011" + ``` + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, source=new_source) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def type(self) -> str: + """ + The [File type](https://app.criptapp.org/vocab/file_type) must come from CRIPT controlled vocabulary + + Example + ------- + ```python + my_file.type = "calibration" + ``` + + Returns + ------- + file type: str + file type must come from [CRIPT controlled vocabulary]() + """ + return self._json_attrs.type + + @type.setter + @beartype + def type(self, new_type: str) -> None: + """ + set the file type + + file type must come from CRIPT controlled vocabulary + + Parameters + ----------- + new_type: str + + Example + ------- + ```python + my_file.type = "computation_config" + ``` + + Returns + ------- + None + """ + # TODO check vocabulary is valid + # is_vocab_valid("file type", self._json_attrs.type) + new_attrs = replace(self._json_attrs, type=new_type) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def extension(self) -> str: + """ + The file extension property explicitly states what is the file extension of the file node. 
+ + Example + ------- + ```python + my_file_node.extension = ".csv"` + ``` + + Returns + ------- + extension: str + file extension + """ + return self._json_attrs.extension + + @extension.setter + @beartype + def extension(self, new_extension) -> None: + """ + sets the new file extension + + Parameters + ---------- + new_extension: str + new file extension to overwrite the current file extension + + Example + ------- + ```python + my_file.extension = ".pdf" + ``` + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, extension=new_extension) + self._update_json_attrs_if_valid(new_attrs) + + @property + @beartype + def data_dictionary(self) -> str: + # TODO data dictionary needs documentation describing it and how to use it + """ + The data dictionary contains additional information + that the scientist needs to describe their file. + + Notes + ------ + It is advised for this field to be written in JSON format + + Examples + ------- + ```python + my_file.data_dictionary = "{'notes': 'This is something that describes my file node.'}" + ``` + + Returns + ------- + data_dictionary: str + the file data dictionary attribute + """ + return self._json_attrs.data_dictionary + + @data_dictionary.setter + @beartype + def data_dictionary(self, new_data_dictionary: str) -> None: + """ + Sets the data dictionary for the file node. + + Parameters + ---------- + new_data_dictionary: str + The new data dictionary to be set. + + Returns + ------- + None + """ + new_attrs = replace(self._json_attrs, data_dictionary=new_data_dictionary) + self._update_json_attrs_if_valid(new_attrs) + + @beartype + def download( + self, + destination_directory_path: Union[str, Path] = ".", + ) -> None: + """ + download this file to current working directory or a specific destination. + The file name will come from the file_node.name and the extension will come from file_node.extension + + Notes + ----- + Whether the file extension is written like `.csv` or `csv` the program will work correctly + + Parameters + ---------- + destination_directory_path: Union[str, Path] + where you want the file to be stored and what you want the name to be + by default it is the current working directory + + Returns + ------- + None + """ + from cript.api.api import _get_global_cached_api + + api = _get_global_cached_api() + + # convert the path from str to Path in case it was given as a str and resolve it to get the absolute path + existing_folder_path = Path(destination_directory_path).resolve() + + # stripping dot from extension to make all extensions uniform, in case a user puts `.csv` or `csv` it will work + file_name = f"{self.name}.{self.extension.lstrip('.')}" + + absolute_file_path = str((existing_folder_path / file_name).resolve()) + + api.download_file(file_source=self.source, destination_path=absolute_file_path) diff --git a/src/cript/nodes/supporting_nodes/user.py b/src/cript/nodes/supporting_nodes/user.py new file mode 100644 index 000000000..c5374d0e6 --- /dev/null +++ b/src/cript/nodes/supporting_nodes/user.py @@ -0,0 +1,150 @@ +from dataclasses import dataclass, replace +from typing import Optional, Union + +from beartype import beartype + +from cript.nodes.uuid_base import UUIDBaseNode + + +class User(UUIDBaseNode): + """ + The [User node](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=27) + represents any researcher or individual who interacts with the CRIPT platform. + It serves two main purposes: + 1. It plays a core role in permissions (access control) + 1. 
It provides a traceable link to the individual who has contributed or edited data within the database + + + | attribute | type | example | description | required | vocab | + |------------|-------------|----------------------------|--------------------------------------------|----------|-------| + | url | str | | unique ID of the node | True | | + | username | str | "john_doe" | User’s name | True | | + | email | str | "user@cript.com" | email of the user | True | | + | orcid | str | "0000-0000-0000-0000" | ORCID ID of the user | True | | + | updated_at | datetime* | 2023-03-06 18:45:23.450248 | last date the node was modified (UTC time) | True | | + | created_at | datetime* | 2023-03-06 18:45:23.450248 | date it was created (UTC time) | True | | + + + ## JSON + ```json + { + "node": "User", + "username": "my username", + "email": "user@email.com", + "orcid": "0000-0000-0000-0001", + } + ``` + + Warnings + ------- + * A User cannot be created or modified using the Python SDK. + * A User node is a **read-only** node that can only be deserialized from API JSON response to Python node. + * The User node cannot be instantiated and within the Python SDK. + * Attempting to edit the user node will result in an `Attribute Error` + + """ + + @dataclass(frozen=True) + class JsonAttributes(UUIDBaseNode.JsonAttributes): + """ + all User attributes + """ + + email: Optional[str] = "" + model_version: str = "" + orcid: Optional[str] = "" + picture: str = "" + username: str = "" + + _json_attrs: JsonAttributes = JsonAttributes() + + @beartype + def __init__(self, username: str, email: Optional[str] = "", orcid: Optional[str] = "", **kwargs): + """ + Json from CRIPT API to be converted to a node + optionally the group can be None if the user doesn't have a group + + Parameters + ---------- + username: str + user username + email: str + user email + orcid: str + user ORCID + """ + super().__init__(**kwargs) + self._json_attrs = replace(self._json_attrs, username=username, email=email, orcid=orcid) + + self.validate() + + @property + @beartype + def created_at(self) -> str: + return self._json_attrs.created_at + + @property + @beartype + def email(self) -> Union[str, None]: + """ + user's email + + Raises + ------ + AttributeError + + Returns + ------- + user email: str + User node email + """ + return self._json_attrs.email + + @property + @beartype + def model_version(self) -> str: + return self._json_attrs.model_version + + @property + @beartype + def orcid(self) -> Union[str, None]: + """ + users [ORCID](https://orcid.org/) + + Raises + ------ + AttributeError + + Returns + ------- + ORCID: str + user's ORCID + """ + return self._json_attrs.orcid + + @property + @beartype + def picture(self) -> str: + return self._json_attrs.picture + + @property + @beartype + def updated_at(self) -> str: + return self._json_attrs.updated_at + + @property + @beartype + def username(self) -> str: + """ + username of the User node + + Raises + ------ + AttributeError + + Returns + ------- + username: str + username of the User node + """ + return self._json_attrs.username diff --git a/src/cript/nodes/util/__init__.py b/src/cript/nodes/util/__init__.py new file mode 100644 index 000000000..a9b4522c6 --- /dev/null +++ b/src/cript/nodes/util/__init__.py @@ -0,0 +1,508 @@ +import dataclasses +import inspect +import json +import uuid +from typing import Dict, List, Optional, Set, Union + +import cript.nodes +from cript.nodes.core import BaseNode +from cript.nodes.exceptions import ( + CRIPTDeserializationUIDError, + 
CRIPTJsonDeserializationError, + CRIPTJsonNodeError, + CRIPTOrphanedComputationalProcessError, + CRIPTOrphanedComputationError, + CRIPTOrphanedDataError, + CRIPTOrphanedMaterialError, + CRIPTOrphanedProcessError, +) +from cript.nodes.primary_nodes.experiment import Experiment +from cript.nodes.primary_nodes.project import Project + + +class NodeEncoder(json.JSONEncoder): + """ + Custom JSON encoder for serializing CRIPT nodes to JSON. + + This encoder is used to convert CRIPT nodes into JSON format while handling unique identifiers (UUIDs) and + condensed representations to avoid redundancy in the JSON output. + It also allows suppressing specific attributes from being included in the serialized JSON. + + Attributes + ---------- + handled_ids : Set[str] + A set to store the UIDs of nodes that have been processed during serialization. + known_uuid : Set[str] + A set to store the UUIDs of nodes that have been previously encountered in the JSON. + condense_to_uuid : Dict[str, Set[str]] + A set to store the node types that should be condensed to UUID edges in the JSON. + suppress_attributes : Optional[Dict[str, Set[str]]] + A dictionary that allows suppressing specific attributes for nodes with the corresponding UUIDs. + + Methods + ------- + ```python + default(self, obj: Any) -> Any: + # Convert CRIPT nodes and other objects to their JSON representation. + ``` + + ```python + _apply_modifications(self, serialize_dict: dict) -> Tuple[dict, List[str]]: + # Apply modifications to the serialized dictionary based on node types + # and attributes to be condensed. This internal function handles node + # condensation and attribute suppression during serialization. + ``` + """ + + handled_ids: Set[str] = set() + known_uuid: Set[str] = set() + condense_to_uuid: Dict[str, Set[str]] = dict() + suppress_attributes: Optional[Dict[str, Set[str]]] = None + + def default(self, obj): + """ + Convert CRIPT nodes and other objects to their JSON representation. + + This method is called during JSON serialization. + It customizes the serialization process for CRIPT nodes and handles unique identifiers (UUIDs) + to avoid redundant data in the JSON output. + It also allows for attribute suppression for specific nodes. + + Parameters + ---------- + obj : Any + The object to be serialized to JSON. + + Returns + ------- + dict + The JSON representation of the input object, which can be a string, a dictionary, or any other JSON-serializable type. + + Raises + ------ + CRIPTJsonDeserializationError + If there is an issue with the JSON deserialization process for CRIPT nodes. + + Notes + ----- + * If the input object is a UUID, it is converted to a string representation and returned. + * If the input object is a CRIPT node (an instance of `BaseNode`), it is serialized into a dictionary + representation. The node is first checked for uniqueness based on its UID (unique identifier), and if + it has already been serialized, it is represented as a UUID edge only. If not, the node's attributes + are added to the dictionary representation, and any default attribute values are removed to reduce + redundancy in the JSON output. + * The method `_apply_modifications()` is called to check if further modifications are needed before + considering the dictionary representation done. This includes condensing certain node types to UUID edges + and suppressing specific attributes for nodes. 
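+
+ Examples
+ --------
+ A minimal sketch of driving the encoder directly through `json.dumps`
+ (not the SDK's usual serialization path, which goes through the node JSON helpers;
+ `my_material` is a placeholder for any existing CRIPT node, and the class-level
+ caches are assumed to be cleared first):
+
+ ```python
+ import json
+
+ NodeEncoder.handled_ids = set()
+ NodeEncoder.known_uuid = set()
+
+ material_json: str = json.dumps(my_material, cls=NodeEncoder)
+ ```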
+ """ + if isinstance(obj, uuid.UUID): + return str(obj) + if isinstance(obj, BaseNode): + try: + uid = obj.uid + except AttributeError: + pass + else: + if uid in NodeEncoder.handled_ids: + return {"uid": uid} + + # When saving graphs, some nodes can be pre-saved. + # If that happens, we want to represent them as a UUID edge only + try: + uuid_str = str(obj.uuid) + except AttributeError: + pass + else: + if uuid_str in NodeEncoder.known_uuid: + return {"uuid": uuid_str} + + default_dataclass = obj.JsonAttributes() + serialize_dict = {} + # Remove default values from serialization + for field_name in [field.name for field in dataclasses.fields(default_dataclass)]: + if getattr(default_dataclass, field_name) != getattr(obj._json_attrs, field_name): + serialize_dict[field_name] = getattr(obj._json_attrs, field_name) + # add the default node type + serialize_dict["node"] = obj._json_attrs.node + + # check if further modifications to the dict is needed before considering it done + serialize_dict, condensed_uid = self._apply_modifications(serialize_dict) + if uid not in condensed_uid: # We can uid (node) as handled if we don't condense it to uuid + NodeEncoder.handled_ids.add(uid) + + # Remove suppressed attributes + if NodeEncoder.suppress_attributes is not None and str(obj.uuid) in NodeEncoder.suppress_attributes: + for attr in NodeEncoder.suppress_attributes[str(obj.uuid)]: + del serialize_dict[attr] + + return serialize_dict + return json.JSONEncoder.default(self, obj) + + def _apply_modifications(self, serialize_dict: Dict): + """ + Checks the serialize_dict to see if any other operations are required before it + can be considered done. If other operations are required, then it passes it to the other operations + and at the end returns the fully finished dict. + + This function is essentially a big switch case that checks the node type + and determines what other operations are required for it. 
+ + Parameters + ---------- + serialize_dict: dict + + Returns + ------- + serialize_dict: dict + """ + + def process_attribute(attribute): + def strip_to_edge_uuid(element): + # Extracts UUID and UID information from the element + try: + uuid = getattr(element, "uuid") + except AttributeError: + uuid = element["uuid"] + if len(element) == 1: # Already a condensed element + return element, None + try: + uid = getattr(element, "uid") + except AttributeError: + uid = element["uid"] + + element = {"uuid": str(uuid)} + return element, uid + + # Processes an attribute based on its type (list or single element) + if isinstance(attribute, list): + processed_elements = [] + for element in attribute: + processed_element, uid = strip_to_edge_uuid(element) + if uid is not None: + uid_of_condensed.append(uid) + processed_elements.append(processed_element) + return processed_elements + else: + processed_attribute, uid = strip_to_edge_uuid(attribute) + if uid is not None: + uid_of_condensed.append(uid) + return processed_attribute + + uid_of_condensed: List = [] + + nodes_to_condense = serialize_dict["node"] + for node_type in nodes_to_condense: + if node_type in self.condense_to_uuid: + attributes_to_process = self.condense_to_uuid[node_type] # type: ignore + for attribute in attributes_to_process: + if attribute in serialize_dict: + attribute_to_condense = serialize_dict[attribute] + processed_attribute = process_attribute(attribute_to_condense) + serialize_dict[attribute] = processed_attribute + + # Check if the node is "Material" and convert the identifiers list to JSON fields + if serialize_dict["node"] == ["Material"]: + serialize_dict = _material_identifiers_list_to_json_fields(serialize_dict) + + return serialize_dict, uid_of_condensed + + +class _UIDProxy: + """ + Proxy class for unresolvable UID nodes. + This is going to be replaced by actual nodes. + + Report a bug if you find this class in production. + """ + + def __init__(self, uid: str): + self.uid = uid + print("proxy", uid) + + +class _NodeDecoderHook: + def __init__(self, uid_cache: Optional[Dict] = None): + """ + Initialize the custom JSON object hook used for CRIPT node deserialization. + + Parameters + ---------- + uid_cache : Optional[Dict], optional + A dictionary to cache Python objects with shared UIDs, by default None. + + Notes + ----- + The `_NodeDecoderHook` class is used as an object hook for JSON deserialization, + handling the conversion of JSON nodes into Python objects based on their node types. + The `uid_cache` is an optional dictionary to store cached objects with shared UIDs + to never create two different python nodes with the same uid. + """ + if uid_cache is None: + uid_cache = {} + self._uid_cache = uid_cache + + def __call__(self, node_str: Union[dict, str]) -> dict: + """ + Internal function, used as a hook for json deserialization. + + This function is called recursively to convert every JSON of a node and its children from node to JSON. 
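+
+ A minimal sketch of how this hook is wired up, mirroring what `load_nodes_from_json`
+ does below (`node_json_string` is a placeholder for any CRIPT node JSON):
+
+ ```python
+ hook = _NodeDecoderHook()
+ my_node = json.loads(node_json_string, object_hook=hook)
+ ```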
+ + If given a JSON without a "node" field then it is assumed that it is not a node + and just a key value pair data, and the JSON string is then just converted from string to dict and returned + applicable for places where the data is something like + + ```json + { "bigsmiles": "123456" } + ``` + + no serialization is needed in this case and just needs to be converted from str to dict + + if the node field is present, then continue and convert the JSON node into a Python object + + Parameters + ---------- + node_str : Union[dict, str] + The JSON representation of a node or a regular dictionary. + + Returns + ------- + Union[CRIPT Node, dict] + Either returns a regular dictionary if the input JSON or input dict is NOT a node. + If it is a node, it returns the appropriate CRIPT node object, such as `cript.Material` + + Raises + ------ + CRIPTJsonNodeError + If there is an issue with the JSON structure or the node type is invalid. + CRIPTJsonDeserializationError + If there is an error during deserialization of a specific node type. + CRIPTDeserializationUIDError + If there is an issue with the UID used for deserialization, like circular references. + """ + node_dict = dict(node_str) # type: ignore + + # Handle UID objects. + if len(node_dict) == 1 and "uid" in node_dict: + try: + return self._uid_cache[node_dict["uid"]] + except KeyError: + # TODO if we convince beartype to accept Proxy temporarily, enable return instead of raise + raise CRIPTDeserializationUIDError("Unknown", node_dict["uid"]) + # return _UIDProxy(node_dict["uid"]) + + try: + node_type_list = node_dict["node"] + except KeyError: # Not a node, just a regular dictionary + return node_dict + + # TODO consider putting this into the try because it might need error handling for the dict + if _is_node_field_valid(node_type_list): + node_type_str = node_type_list[0] + else: + raise CRIPTJsonNodeError(node_type_list, str(node_str)) + + # Iterate over all nodes in cript to find the correct one here + for key, pyclass in inspect.getmembers(cript.nodes, inspect.isclass): + if BaseNode in inspect.getmro(pyclass): + if key == node_type_str: + try: + json_node = pyclass._from_json(node_dict) + self._uid_cache[json_node.uid] = json_node + return json_node + except Exception as exc: + raise CRIPTJsonDeserializationError(key, str(node_type_str)) from exc + # Fall back + return node_dict + + +def _material_identifiers_list_to_json_fields(serialize_dict: dict) -> dict: + """ + input: + ```json + { + "node":["Material"], + "name":"my material", + "identifiers":[ {"cas":"my material cas"} ], + "uid":"_:a78203cb-82ea-4376-910e-dee74088cd37" + } + ``` + + output: + ```json + { + "node":["Material"], + "name":"my material", + "cas":"my material cas", + "uid":"_:08018f4a-e8e3-4ac0-bdad-fa704fdc0145" + } + ``` + + Parameters + ---------- + serialize_dict: dict + the serialized dictionary of the node + + Returns + ------- + serialized_dict = dict + new dictionary that has converted the list of dictionary identifiers into the dictionary as fields + + """ + + # TODO this if statement might not be needed in future + if "identifiers" in serialize_dict: + for identifier in serialize_dict["identifiers"]: + for key, value in identifier.items(): + serialize_dict[key] = value + + # remove identifiers list of objects after it has been flattened + del serialize_dict["identifiers"] + + return serialize_dict + + +def _rename_field(serialize_dict: dict, old_name: str, new_name: str) -> dict: + """ + renames `property_` to `property` the JSON + """ + if "property_" 
in serialize_dict: + serialize_dict[new_name] = serialize_dict.pop(old_name) + + return serialize_dict + + +def _is_node_field_valid(node_type_list: list) -> bool: + """ + a simple function that checks if the node field has only a single node type in there + and not 2 or None + + Parameters + ---------- + node_type_list: list + e.g. "node": ["Material"] + + Returns + ------ + bool + if all tests pass then it returns true, otherwise false + """ + + # TODO consider having exception handling for the dict + if isinstance(node_type_list, list) and len(node_type_list) == 1 and isinstance(node_type_list[0], str) and len(node_type_list[0]) > 0: + return True + else: + return False + + +def load_nodes_from_json(nodes_json: str): + """ + User facing function, that return a node and all its children from a json string input. + + Parameters + ---------- + nodes_json: str + JSON string representation of a CRIPT node + + Examples + -------- + ```python + # get project node from API + my_paginator = cript_api.search( + node_type=cript.Project, + search_mode=cript.SearchModes.EXACT_NAME, + value_to_search=project_node.name + ) + + # get the project from paginator + my_project_from_api_dict = my_paginator.current_page_results[0] + + # convert API JSON to CRIPT Project node + my_project_from_api = cript.load_nodes_from_json(json.dumps(my_project_from_api_dict)) + ``` + + Raises + ------ + CRIPTJsonNodeError + If there is an issue with the JSON of the node field. + CRIPTJsonDeserializationError + If there is an error during deserialization of a specific node. + CRIPTDeserializationUIDError + If there is an issue with the UID used for deserialization, like circular references. + + Notes + ----- + This function uses a custom `_NodeDecoderHook` to convert JSON nodes into Python objects. + The `_NodeDecoderHook` class is responsible for handling the deserialization of nodes + and caching objects with shared UIDs to avoid redundant deserialization. + + The function is intended for deserializing CRIPT nodes and should not be used for generic JSON. + + + Returns + ------- + Union[CRIPT Node, List[CRIPT Node]] + Typically returns a single CRIPT node, + but if given a list of nodes, then it will serialize them and return a list of CRIPT nodes + """ + node_json_hook = _NodeDecoderHook() + json_nodes = json.loads(nodes_json, object_hook=node_json_hook) + + # TODO: enable this logic to replace proxies, once beartype is OK with that. 
+ # def recursive_proxy_replacement(node, handled_nodes): + # if isinstance(node, _UIDProxy): + # try: + # node = node_json_hook._uid_cache[node.uid] + # except KeyError as exc: + # raise CRIPTDeserializationUIDError(node.node_type, node.uid) + # return node + # handled_nodes.add(node.uid) + # for field in node._json_attrs.__dict__: + # child_node = getattr(node._json_attrs, field) + # if not isinstance(child_node, list): + # if hasattr(cn, "__bases__") and BaseNode in child_node.__bases__: + # child_node = recursive_proxy_replacement(child_node, handled_nodes) + # node._json_attrs = replace(node._json_attrs, field=child_node) + # else: + # for i, cn in enumerate(child_node): + # if hasattr(cn, "__bases__") and BaseNode in cn.__bases__: + # if cn.uid not in handled_nodes: + # child_node[i] = recursive_proxy_replacement(cn, handled_nodes) + + # return node + # handled_nodes = set() + # recursive_proxy_replacement(json_nodes, handled_nodes) + return json_nodes + + +def add_orphaned_nodes_to_project(project: Project, active_experiment: Experiment, max_iteration: int = -1): + """ + Helper function that adds all orphaned material nodes of the project graph to the + `project.materials` attribute. + Material additions only is permissible with `active_experiment is None`. + This function also adds all orphaned data, process, computation and computational process nodes + of the project graph to the `active_experiment`. + This functions call `project.validate` and might raise Exceptions from there. + """ + if active_experiment is not None and active_experiment not in project.find_children({"node": ["Experiment"]}): + raise RuntimeError(f"The provided active experiment {active_experiment} is not part of the project graph. Choose an active experiment that is part of a collection of this project.") + + counter = 0 + while True: + if counter > max_iteration >= 0: + break # Emergency stop + try: + project.validate() + except CRIPTOrphanedMaterialError as exc: + # because calling the setter calls `validate` we have to force add the material. + project._json_attrs.material.append(exc.orphaned_node) + except CRIPTOrphanedDataError as exc: + active_experiment.data += [exc.orphaned_node] + except CRIPTOrphanedProcessError as exc: + active_experiment.process += [exc.orphaned_node] + except CRIPTOrphanedComputationError as exc: + active_experiment.computation += [exc.orphaned_node] + except CRIPTOrphanedComputationalProcessError as exc: + active_experiment.computation_process += [exc.orphaned_node] + else: + break + counter += 1 diff --git a/src/cript/nodes/util/material_deserialization.py b/src/cript/nodes/util/material_deserialization.py new file mode 100644 index 000000000..2bbf5d142 --- /dev/null +++ b/src/cript/nodes/util/material_deserialization.py @@ -0,0 +1,74 @@ +from typing import Dict, List + +import cript + + +def _deserialize_flattened_material_identifiers(json_dict: Dict) -> Dict: + """ + takes a material node in JSON format that has its identifiers as attributes and convert it to have the + identifiers within the identifiers field of a material node + + 1. gets the material identifiers controlled vocabulary from the API + 1. converts the API response from list[dicts] to just a list[str] + 1. loops through all the material identifiers and checks if they exist within the JSON dict + 1. 
if a material identifier is spotted in json dict, then that material identifier is moved from JSON attribute + into an identifiers field + + + ## Input + ```python + { + "node": ["Material"], + "name": "my cool material", + "uuid": "_:my cool material", + "smiles": "CCC", + "bigsmiles": "my big smiles" + } + ``` + + ## Output + ```python + { + "node":["Material"], + "name":"my cool material", + "uuid":"_:my cool material", + "identifiers":[ + {"smiles":"CCC"}, + {"bigsmiles":"my big smiles"} + ] + } + ``` + + Parameters + ---------- + json_dict: Dict + A JSON dictionary representing a node + + Returns + ------- + json_dict: Dict + A new JSON dictionary with the material identifiers moved from attributes to the identifiers field + """ + from cript.api.api import _get_global_cached_api + + api = _get_global_cached_api() + + # get material identifiers keys from API and create a simple list + # eg ["smiles", "bigsmiles", etc.] + all_identifiers_list: List[str] = [identifier.get("name") for identifier in api.get_vocab_by_category(cript.VocabCategories.MATERIAL_IDENTIFIER_KEY)] + + # pop "name" from identifiers list because the node has to have a name + all_identifiers_list.remove("name") + + identifier_argument: List[Dict] = [] + + # move material identifiers from JSON attribute to identifiers attributes + for identifier in all_identifiers_list: + if identifier in json_dict: + identifier_argument.append({identifier: json_dict[identifier]}) + # delete identifiers from the API JSON response as they are added to the material node + del json_dict[identifier] + + json_dict["identifiers"] = identifier_argument + + return json_dict diff --git a/src/cript/nodes/uuid_base.py b/src/cript/nodes/uuid_base.py new file mode 100644 index 000000000..899b278ed --- /dev/null +++ b/src/cript/nodes/uuid_base.py @@ -0,0 +1,76 @@ +import uuid +from abc import ABC +from dataclasses import dataclass, field, replace +from typing import Any, Dict + +from cript.nodes.core import BaseNode + + +def get_uuid_from_uid(uid): + return str(uuid.UUID(uid[2:])) + + +class UUIDBaseNode(BaseNode, ABC): + """ + Base node that handles UUIDs and URLs. 
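+
+ Every node deriving from this class carries a `uuid` (taken from the API JSON when
+ present, otherwise derived from the local `uid`) and exposes a `url` built from the
+ active API host.
+
+ A small illustration of the uid-to-uuid helper defined above (the leading `_:` marks
+ a local uid and is stripped before parsing):
+
+ ```python
+ get_uuid_from_uid("_:f81d4fae-7dec-11d0-a765-00a0c91e6bf6")
+ # returns "f81d4fae-7dec-11d0-a765-00a0c91e6bf6"
+ ```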
+ """ + + # Class attribute that caches all nodes created + _uuid_cache: Dict = {} + + @dataclass(frozen=True) + class JsonAttributes(BaseNode.JsonAttributes): + """ + All shared attributes between all Primary nodes and set to their default values + """ + + uuid: str = field(default_factory=lambda: str(uuid.uuid4())) + updated_by: Any = None + created_by: Any = None + created_at: str = "" + updated_at: str = "" + + _json_attrs: JsonAttributes = JsonAttributes() + + def __init__(self, **kwargs): + # initialize Base class with node + super().__init__(**kwargs) + # Respect uuid if passed as argument, otherwise construct uuid from uid + uuid = kwargs.get("uuid", get_uuid_from_uid(self.uid)) + # replace name and notes within PrimaryBase + self._json_attrs = replace(self._json_attrs, uuid=uuid) + + # Place successfully created node in the UUID cache + self._uuid_cache[uuid] = self + + @property + def uuid(self) -> uuid.UUID: + return uuid.UUID(self._json_attrs.uuid) + + @property + def url(self): + from cript.api.api import _get_global_cached_api + + api = _get_global_cached_api() + return f"{api.host}/{self.uuid}" + + def __deepcopy__(self, memo): + node = super().__deepcopy__(memo) + node._json_attrs = replace(node._json_attrs, uuid=get_uuid_from_uid(node.uid)) + return node + + @property + def updated_by(self): + return self._json_attrs.updated_by + + @property + def created_by(self): + return self._json_attrs.created_by + + @property + def updated_at(self): + return self._json_attrs.updated_at + + @property + def created_at(self): + return self._json_attrs.created_at diff --git a/tests/api/test_api.py b/tests/api/test_api.py new file mode 100644 index 000000000..7809f16da --- /dev/null +++ b/tests/api/test_api.py @@ -0,0 +1,412 @@ +import datetime +import json +import os +import tempfile +import uuid +from pathlib import Path +from typing import Dict + +import pytest +import requests +from conftest import HAS_INTEGRATION_TESTS_ENABLED + +import cript +from cript.api.exceptions import InvalidVocabulary +from cript.api.paginator import Paginator +from cript.nodes.exceptions import CRIPTNodeSchemaError + + +def test_create_api(cript_api: cript.API) -> None: + """ + tests that an API object can be successfully created with host and token + """ + # api = cript.API(host=None, api_token=None) + # + # # assertions + # assert api is not None + # assert isinstance(api, cript.API) + + pass + + +def test_api_with_invalid_host() -> None: + """ + this mostly tests the _prepare_host() function to be sure it is working as expected + * attempting to create an api client with invalid host appropriately throws a `CRIPTConnectionError` + * giving a host that does not start with http such as "criptapp.org" should throw an InvalidHostError + """ + with pytest.raises((requests.ConnectionError, cript.api.exceptions.CRIPTConnectionError)): + cript.API(host="https://some_invalid_host", api_token="123456789", storage_token="123456") + + with pytest.raises(cript.api.exceptions.InvalidHostError): + cript.API(host="no_http_host.org", api_token="123456789", storage_token="987654321") + + +@pytest.mark.skipif(not HAS_INTEGRATION_TESTS_ENABLED, reason="skipping because API client needs API token") +def test_api_context(cript_api: cript.API) -> None: + assert cript.api.api._global_cached_api is not None + assert cript.api.api._get_global_cached_api() is not None + + +def test_api_cript_env_vars() -> None: + """ + tests that when the cript.API is given None for host, api_token, storage_token that it can correctly + retrieve 
things from the env variable + """ + host_value = "http://development.api.mycriptapp.org/" + api_token_value = "my cript API token value" + storage_token_value = "my cript storage token value" + + # set env vars + os.environ["CRIPT_HOST"] = host_value + os.environ["CRIPT_TOKEN"] = api_token_value + os.environ["CRIPT_STORAGE_TOKEN"] = storage_token_value + + api = cript.API(host=None, api_token=None, storage_token=None) + + # host/api/v1 + assert api._host == f"{host_value}api/v1" + assert api._api_token == api_token_value + assert api._storage_token == storage_token_value + + +def test_config_file() -> None: + """ + test if the api can read configurations from `config.json` + """ + + config_file_texts = {"host": "https://development.api.mycriptapp.org", "api_token": "I am token", "storage_token": "I am storage token"} + + with tempfile.NamedTemporaryFile(mode="w+t", suffix=".json", delete=False) as temp_file: + # absolute file path + config_file_path = temp_file.name + + # write JSON to temporary file + temp_file.write(json.dumps(config_file_texts)) + + # force text to be written to file + temp_file.flush() + + api = cript.API(config_file_path=config_file_path) + + assert api._host == config_file_texts["host"] + "/api/v1" + assert api._api_token == config_file_texts["api_token"] + + +@pytest.mark.skip(reason="too early to write as there are higher priority tasks currently") +def test_api_initialization_stress() -> None: + """ + tries to put the API configuration under as much stress as it possibly can + it tries to give it mixed options and try to trip it up and create issues for it + + ## scenarios + 1. if there is a config file and other inputs, then config file wins + 1. if config file, but is missing an attribute, and it is labeled as None, then should get it from env var + 1. 
if there is half from input and half from env var, then both should work happily + """ + pass + + +def test_get_db_schema_from_api(cript_api: cript.API) -> None: + """ + tests that the Python SDK can successfully get the db schema from API + """ + db_schema = cript_api._get_db_schema() + + assert bool(db_schema) + assert isinstance(db_schema, dict) + + # db schema should have at least 30 fields + assert len(db_schema["$defs"]) > 30 + + +def test_is_node_schema_valid(cript_api: cript.API) -> None: + """ + test that a CRIPT node can be correctly validated and invalidated with the db schema + + * test a couple of nodes to be sure db schema validation is working fine + * material node + * file node + * test db schema validation with an invalid node, and it should be invalid + + Notes + ----- + * does not test if serialization/deserialization works correctly, + just tests if the node schema can work correctly if serialization was correct + + # TODO the tests here only test POST db schema and not PATCH yet, those tests must be added + """ + + # ------ invalid node schema------ + invalid_schema = {"invalid key": "invalid value", "node": ["Material"]} + + with pytest.raises(CRIPTNodeSchemaError): + cript_api._is_node_schema_valid(node_json=json.dumps(invalid_schema), is_patch=False) + + # ------ valid material schema ------ + # valid material node + valid_material_dict = {"node": ["Material"], "name": "0.053 volume fraction CM gel", "uid": "_:0.053 volume fraction CM gel"} + + # convert dict to JSON string because method expects JSON string + assert cript_api._is_node_schema_valid(node_json=json.dumps(valid_material_dict), is_patch=False) is True + # ------ valid file schema ------ + valid_file_dict = { + "node": ["File"], + "source": "https://criptapp.org", + "type": "calibration", + "extension": ".csv", + "data_dictionary": "my file's data dictionary", + } + + # convert dict to JSON string because method expects JSON string + assert cript_api._is_node_schema_valid(node_json=json.dumps(valid_file_dict), is_patch=False) is True + + +def test_get_vocabulary_by_category(cript_api: cript.API) -> None: + """ + tests if a vocabulary can be retrieved by category + 1. tests response is a list of dicts as expected + 1. create a new list of just material identifiers + 1. 
tests that the fundamental identifiers exist within the API vocabulary response + + Warnings + -------- + This test only gets the vocabulary category for "material_identifier_key" and does not test all the possible + CRIPT controlled vocabulary + """ + + material_identifier_vocab_list = cript_api.get_vocab_by_category(cript.VocabCategories.MATERIAL_IDENTIFIER_KEY) + + # test response is a list of dicts + assert isinstance(material_identifier_vocab_list, list) + + material_identifiers = [identifier["name"] for identifier in material_identifier_vocab_list] + + # assertions + assert "bigsmiles" in material_identifiers + assert "smiles" in material_identifiers + assert "pubchem_cid" in material_identifiers + + +def test_get_controlled_vocabulary_from_api(cript_api: cript.API) -> None: + """ + checks if it can successfully get the controlled vocabulary list from CRIPT API + """ + number_of_vocab_categories = 26 + vocab = cript_api._get_vocab() + + # assertions + # check vocabulary list is not empty + assert bool(vocab) is True + assert len(vocab) == number_of_vocab_categories + + +def test_is_vocab_valid(cript_api: cript.API) -> None: + """ + tests if the method for vocabulary is validating and invalidating correctly + + * test with custom key to check it automatically gives valid + * test with a few vocabulary_category and vocabulary_words + * valid category and valid vocabulary word + * test that invalid category throws the correct error + * invalid category and valid vocabulary word + * test that invalid vocabulary word throws the correct error + * valid category and invalid vocabulary word + tests invalid category and invalid vocabulary word + """ + # custom vocab + assert cript_api._is_vocab_valid(vocab_category=cript.VocabCategories.ALGORITHM_KEY, vocab_word="+my_custom_key") is True + + # valid vocab category and valid word + assert cript_api._is_vocab_valid(vocab_category=cript.VocabCategories.FILE_TYPE, vocab_word="calibration") is True + assert cript_api._is_vocab_valid(vocab_category=cript.VocabCategories.QUANTITY_KEY, vocab_word="mass") is True + assert cript_api._is_vocab_valid(vocab_category=cript.VocabCategories.UNCERTAINTY_TYPE, vocab_word="fwhm") is True + + # valid vocab category but invalid vocab word + with pytest.raises(InvalidVocabulary): + cript_api._is_vocab_valid(vocab_category=cript.VocabCategories.FILE_TYPE, vocab_word="some_invalid_word") + + +def test_download_file_from_url(cript_api: cript.API, tmp_path) -> None: + """ + downloads the file from a URL and writes it to disk + then opens, reads, and compares that the file was gotten and written correctly + """ + url_to_download_file: str = "https://criptscripts.org/cript_graph_json/JSON/cao_protein.json" + + # `download_file()` will get the file extension from the end of the URL and add it onto the name + # the path it will save it to will be `tmp_path/downloaded_file_name.json` + path_to_save_file: Path = tmp_path / "downloaded_file_name" + + cript_api.download_file(url_to_download_file, str(path_to_save_file)) + + # add file extension to file path and convert it to file path object + path_to_read_file = Path(str(path_to_save_file) + ".json").resolve() + + # open the file that was just saved and read the contents + saved_file_contents = json.loads(path_to_read_file.read_text()) + + # make a request manually to get the contents and check that the contents are the same + response: Dict = requests.get(url=url_to_download_file).json() + + # assert that the file I've save and the one on the web are the same + assert 
response == saved_file_contents + + +@pytest.mark.skipif(not HAS_INTEGRATION_TESTS_ENABLED, reason="requires a real storage_token from a real frontend") +def test_upload_and_download_local_file(cript_api, tmp_path_factory) -> None: + """ + tests file upload to cloud storage + test by uploading a local file to AWS S3 using cognito mode + and then downloading the same file and checking their contents are the same + proving that the file was uploaded and downloaded correctly + + 1. create a temporary file + 1. write a unique string to the temporary file via UUID4 and date + so when downloading it later the downloaded file cannot possibly be a mistake and we know + for sure that it is the correct file uploaded and downloaded + 1. upload to AWS S3 `tests/` directory + 1. we can be sure that the file has been correctly uploaded to AWS S3 if we can download the same file + and assert that the file contents are the same as original + """ + file_text: str = ( + f"This is an automated test from the Python SDK within `tests/api/test_api.py` " f"within the `test_upload_file_to_aws_s3()` test function " f"on UTC time of '{datetime.datetime.utcnow()}' " f"with the unique UUID of '{str(uuid.uuid4())}'" + ) + + # Create a temporary file with unique contents + upload_test_file = tmp_path_factory.mktemp("test_api_file_upload") / "temp_upload_file.txt" + upload_test_file.write_text(file_text) + + # upload file to AWS S3 + my_file_cloud_storage_object_name = cript_api.upload_file(file_path=upload_test_file) + + # temporary file path and new file to write the cloud storage file contents to + download_test_file = tmp_path_factory.mktemp("test_api_file_download") / "temp_download_file.txt" + + # download file from cloud storage + cript_api.download_file(file_source=my_file_cloud_storage_object_name, destination_path=str(download_test_file)) + + # read file contents + downloaded_file_contents = download_test_file.read_text() + + # assert download file contents are the same as uploaded file contents + assert downloaded_file_contents == file_text + + +@pytest.mark.skipif(not HAS_INTEGRATION_TESTS_ENABLED, reason="requires a real cript_api_token") +def test_api_search_node_type(cript_api: cript.API) -> None: + """ + tests the api.search() method with just a node type material search + + just testing that something comes back from the server + + Notes + ----- + * also tests that it can go to the next page and previous page + * later this test should be expanded to test things that it should expect an error for as well. 
+ * test checks if there are at least 5 things in the paginator + * each page should have a max of 10 results and there should be close to 5k materials in db, + * more than enough to at least have 5 in the paginator + """ + materials_paginator = cript_api.search(node_type=cript.Material, search_mode=cript.SearchModes.NODE_TYPE, value_to_search=None) + + # test search results + assert isinstance(materials_paginator, Paginator) + assert len(materials_paginator.current_page_results) > 5 + first_page_first_result = materials_paginator.current_page_results[0]["name"] + + # just checking that the word has a few characters in it + assert len(first_page_first_result) > 3 + + # tests that it can correctly go to the next page + materials_paginator.next_page() + assert len(materials_paginator.current_page_results) > 5 + second_page_first_result = materials_paginator.current_page_results[0]["name"] + + assert len(second_page_first_result) > 3 + + # tests that it can correctly go to the previous page + materials_paginator.previous_page() + assert len(materials_paginator.current_page_results) > 5 + + assert len(first_page_first_result) > 3 + + +@pytest.mark.skipif(not HAS_INTEGRATION_TESTS_ENABLED, reason="requires a real cript_api_token") +def test_api_search_contains_name(cript_api: cript.API) -> None: + """ + tests that it can correctly search with contains name mode + searches for a material that contains the name "poly" + """ + contains_name_paginator = cript_api.search(node_type=cript.Material, search_mode=cript.SearchModes.CONTAINS_NAME, value_to_search="poly") + + assert isinstance(contains_name_paginator, Paginator) + assert len(contains_name_paginator.current_page_results) > 5 + + contains_name_first_result = contains_name_paginator.current_page_results[0]["name"] + + # just checking that the result has a few characters in it + assert len(contains_name_first_result) > 3 + + +@pytest.mark.skipif(not HAS_INTEGRATION_TESTS_ENABLED, reason="requires a real cript_api_token") +def test_api_search_exact_name(cript_api: cript.API) -> None: + """ + tests search method with exact name search + searches for material "Sodium polystyrene sulfonate" + """ + exact_name_paginator = cript_api.search(node_type=cript.Material, search_mode=cript.SearchModes.EXACT_NAME, value_to_search="Sodium polystyrene sulfonate") + + assert isinstance(exact_name_paginator, Paginator) + assert len(exact_name_paginator.current_page_results) == 1 + assert exact_name_paginator.current_page_results[0]["name"] == "Sodium polystyrene sulfonate" + + +@pytest.mark.skipif(not HAS_INTEGRATION_TESTS_ENABLED, reason="requires a real cript_api_token") +def test_api_search_uuid(cript_api: cript.API) -> None: + """ + tests search with UUID + searches for Sodium polystyrene sulfonate material that has a UUID of "fcc6ed9d-22a8-4c21-bcc6-25a88a06c5ad" + """ + # try develop result + try: + uuid_to_search = "fcc6ed9d-22a8-4c21-bcc6-25a88a06c5ad" + + uuid_paginator = cript_api.search(node_type=cript.Material, search_mode=cript.SearchModes.UUID, value_to_search=uuid_to_search) + + assert isinstance(uuid_paginator, Paginator) + assert len(uuid_paginator.current_page_results) == 1 + assert uuid_paginator.current_page_results[0]["name"] == "Sodium polystyrene sulfonate" + assert uuid_paginator.current_page_results[0]["uuid"] == uuid_to_search + + # if fail try staging result + except AssertionError: + uuid_to_search = "e1b41d34-3bf2-4cd8-9a19-6412df7e7efc" + + uuid_paginator = cript_api.search(node_type=cript.Material, 
search_mode=cript.SearchModes.UUID, value_to_search=uuid_to_search) + + assert isinstance(uuid_paginator, Paginator) + assert len(uuid_paginator.current_page_results) == 1 + assert uuid_paginator.current_page_results[0]["name"] == "Sodium polystyrene sulfonate" + assert uuid_paginator.current_page_results[0]["uuid"] == uuid_to_search + + +def test_get_my_user_node_from_api(cript_api: cript.API) -> None: + """ + tests that the Python SDK can successfully get the user node associated with the API Token + """ + pass + + +def test_get_my_group_node_from_api(cript_api: cript.API) -> None: + """ + tests that group node that is associated with their API Token can be gotten correctly + """ + pass + + +def test_get_my_projects_from_api(cript_api: cript.API) -> None: + """ + get a page of project nodes that is associated with the API token + """ + pass diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 000000000..4e6fe124c --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,56 @@ +# trunk-ignore-all(ruff/F403) +""" +This conftest file contains simple nodes (nodes with minimal required arguments) +and complex node (nodes that have all possible arguments), to use for testing. + +Since nodes often depend on other nodes copying and pasting nodes is not ideal, +and keeping all nodes in one file makes it easier/cleaner to create tests. + +The fixtures are all functional fixtures that stay consistent between all tests. +""" +import os + +import pytest +from fixtures.primary_nodes import * +from fixtures.subobjects import * +from fixtures.supporting_nodes import * + +import cript + + +def _get_cript_tests_env() -> bool: + """ + Gets `CRIPT_TESTS` value from env variable and converts it to boolean. + If `CRIPT_TESTS` env var does not exist then it will default it to False. + """ + try: + has_integration_tests_enabled = os.getenv("CRIPT_TESTS").title().strip() == "True" + except AttributeError: + has_integration_tests_enabled = True + + return has_integration_tests_enabled + + +# flip integration tests ON or OFF with this boolean +# automatically gets value env vars to run integration tests +HAS_INTEGRATION_TESTS_ENABLED: bool = _get_cript_tests_env() + + +@pytest.fixture(scope="session", autouse=True) +def cript_api(): + """ + Create an API instance for the rest of the tests to use. + + Returns + ------- + API: cript.API + The created CRIPT API instance. 
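+
+ Examples
+ --------
+ A sketch of how a test consumes this fixture (pytest injects it by parameter name;
+ `test_something` is a placeholder):
+
+ ```python
+ def test_something(cript_api: cript.API) -> None:
+     assert cript_api is not None
+ ```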
+ """ + storage_token = os.getenv("CRIPT_STORAGE_TOKEN") + + assert cript.api.api._global_cached_api is None + with cript.API(host=None, api_token=None, storage_token=storage_token) as api: + # using the tests folder name within our cloud storage + api._BUCKET_DIRECTORY_NAME = "tests" + yield api + assert cript.api.api._global_cached_api is None diff --git a/tests/fixtures/primary_nodes.py b/tests/fixtures/primary_nodes.py new file mode 100644 index 000000000..04b219c02 --- /dev/null +++ b/tests/fixtures/primary_nodes.py @@ -0,0 +1,321 @@ +import copy +import json +import uuid + +import pytest +from util import strip_uid_from_dict + +import cript + + +@pytest.fixture(scope="function") +def simple_project_node(simple_collection_node) -> cript.Project: + """ + create a minimal Project node with only required arguments for other tests to use + + Returns + ------- + cript.Project + """ + + return cript.Project(name="my Project name", collection=[simple_collection_node]) + + +@pytest.fixture(scope="function") +def complex_project_dict(complex_collection_node, simple_material_node, complex_user_node) -> dict: + project_dict = {"node": ["Project"]} + project_dict["locked"] = True + project_dict["model_version"] = "1.0.0" + project_dict["updated_by"] = json.loads(copy.deepcopy(complex_user_node).get_json(condense_to_uuid={}).json) + project_dict["created_by"] = json.loads(complex_user_node.get_json(condense_to_uuid={}).json) + project_dict["public"] = True + project_dict["name"] = "my project name" + project_dict["notes"] = "my project notes" + project_dict["member"] = [json.loads(complex_user_node.get_json(condense_to_uuid={}).json)] + project_dict["admin"] = [json.loads(complex_user_node.get_json(condense_to_uuid={}).json)] + project_dict["collection"] = [json.loads(complex_collection_node.get_json(condense_to_uuid={}).json)] + project_dict["material"] = [json.loads(copy.deepcopy(simple_material_node).get_json(condense_to_uuid={}).json)] + return project_dict + + +@pytest.fixture(scope="function") +def complex_project_node(complex_project_dict) -> cript.Project: + """ + a complex Project node that includes all possible optional arguments that are themselves complex as well + """ + complex_project = cript.load_nodes_from_json(json.dumps(complex_project_dict)) + return complex_project + + +@pytest.fixture(scope="function") +def simple_collection_node(simple_experiment_node) -> cript.Collection: + """ + create a simple collection node for other tests to be able to easily and cleanly reuse + + Notes + ----- + * [Collection](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=8) + has no required attributes. 
+ * The Python SDK only requires Collections to have `name` + * Since it doesn't make sense to have an empty Collection I added an Experiment to the Collection as well + """ + my_collection_name = "my collection name" + + my_collection = cript.Collection(name=my_collection_name, experiment=[simple_experiment_node]) + + return my_collection + + +@pytest.fixture(scope="function") +def complex_collection_node(simple_experiment_node, simple_inventory_node, complex_citation_node) -> cript.Collection: + """ + Collection node with all optional arguments + """ + my_collection_name = "my complex collection name" + my_cript_doi = "10.1038/1781168a0" + + my_collection = cript.Collection( + name=my_collection_name, + experiment=[simple_experiment_node], + inventory=[simple_inventory_node], + doi=my_cript_doi, + citation=[complex_citation_node], + ) + + return my_collection + + +@pytest.fixture(scope="function") +def simple_experiment_node() -> cript.Experiment: + """ + minimal experiment node to use for other tests + + Returns + ------- + Experiment + """ + + return cript.Experiment(name="my experiment name") + + +@pytest.fixture(scope="function") +def simple_computation_process_node(complex_ingredient_node, simple_data_node) -> cript.ComputationProcess: + """ + simple Computational Process node with only required arguments to use in other tests + """ + my_computational_process_type = "cross_linking" + + my_computational_process = cript.ComputationProcess( + name="my computational process name", + type=my_computational_process_type, + input_data=[copy.deepcopy(simple_data_node)], + ingredient=[complex_ingredient_node], + ) + + return my_computational_process + + +@pytest.fixture(scope="function") +def simple_data_node(complex_file_node) -> cript.Data: + """ + minimal data node + """ + my_data = cript.Data(name="my data name", type="afm_amp", file=[complex_file_node]) + + return my_data + + +@pytest.fixture(scope="function") +def complex_data_node( + complex_file_node, + simple_process_node, + simple_computation_node, + simple_computation_process_node, + simple_material_node, + complex_citation_node, +) -> None: + """ + create a complex data node with all possible arguments for all tests to use when needed + """ + my_complex_data = cript.Data( + name="my complex data node name", + type="afm_amp", + file=[copy.deepcopy(complex_file_node)], + sample_preparation=copy.deepcopy(simple_process_node), + computation=[simple_computation_node], + computation_process=[simple_computation_process_node], + material=[simple_material_node], + process=[copy.deepcopy(simple_process_node)], + citation=[copy.deepcopy(complex_citation_node)], + ) + + return my_complex_data + + +@pytest.fixture(scope="function") +def simple_process_node() -> cript.Process: + """ + simple process node to use in other tests to keep tests clean + """ + my_process = cript.Process(name="my process name", type="affinity_pure") + + return my_process + + +@pytest.fixture(scope="function") +def complex_process_node(complex_ingredient_node, simple_equipment_node, complex_citation_node, simple_property_node, simple_condition_node, simple_material_node, simple_process_node) -> None: + """ + create a process node with all possible arguments + + Notes + ----- + * indirectly tests the vocabulary as well, as it gives it valid vocabulary + """ + + my_process_name = "my complex process node name" + my_process_type = "affinity_pure" + my_process_description = "my simple material description" + + process_waste = [ + cript.Material(name="my process waste 
material 1", identifiers=[{"bigsmiles": "process waste bigsmiles"}]), + ] + + my_process_keywords = [ + "anionic", + "annealing_sol", + ] + + my_complex_process = cript.Process( + name=my_process_name, + type=my_process_type, + ingredient=[complex_ingredient_node], + description=my_process_description, + equipment=[simple_equipment_node], + product=[simple_material_node], + waste=process_waste, + prerequisite_process=[simple_process_node], + condition=[simple_condition_node], + property=[simple_property_node], + keyword=my_process_keywords, + citation=[complex_citation_node], + ) + + return my_complex_process + + +@pytest.fixture(scope="function") +def simple_computation_node() -> cript.Computation: + """ + simple computation node to use between tests + """ + my_computation = cript.Computation(name="my computation name", type="analysis") + + return my_computation + + +@pytest.fixture(scope="function") +def simple_material_node() -> cript.Material: + """ + simple material node to use between tests + """ + identifiers = [{"bigsmiles": "123456"}] + # Use a unique name + my_material = cript.Material(name="my test material " + str(uuid.uuid4()), identifiers=identifiers) + + return my_material + + +@pytest.fixture(scope="function") +def simple_material_dict() -> dict: + """ + the dictionary that `simple_material_node` produces + putting it in one location to make updating it easy + """ + simple_material_dict: dict = {"node": ["Material"], "name": "my material", "bigsmiles": "123456"} + + return simple_material_dict + + +@pytest.fixture(scope="function") +def complex_material_dict(simple_property_node, simple_process_node, complex_computational_forcefield_node, simple_material_node) -> cript.Material: + """ + complex Material node with all possible attributes filled + """ + my_material_keyword = ["acetylene"] + + material_dict = {"node": ["Material"]} + material_dict["name"] = "my complex material" + material_dict["property"] = [json.loads(simple_property_node.get_json(condense_to_uuid={}).json)] + material_dict["process"] = json.loads(simple_process_node.get_json(condense_to_uuid={}).json) + material_dict["parent_material"] = json.loads(simple_material_node.get_json(condense_to_uuid={}).json) + material_dict["computational_forcefield"] = json.loads(complex_computational_forcefield_node.get_json(condense_to_uuid={}).json) + material_dict["bigsmiles"] = "my complex_material_node" + material_dict["keyword"] = my_material_keyword + + return strip_uid_from_dict(material_dict) + + +@pytest.fixture(scope="function") +def complex_material_node(simple_property_node, simple_process_node, complex_computational_forcefield_node, simple_material_node) -> cript.Material: + """ + complex Material node with all possible attributes filled + """ + my_identifier = [{"bigsmiles": "my complex_material_node"}] + my_material_keyword = ["acetylene"] + + my_complex_material = cript.Material( + name="my complex material", + identifiers=my_identifier, + property=[simple_property_node], + process=copy.deepcopy(simple_process_node), + parent_material=simple_material_node, + computational_forcefield=complex_computational_forcefield_node, + keyword=my_material_keyword, + ) + + return my_complex_material + + +@pytest.fixture(scope="function") +def simple_inventory_node(simple_material_node) -> None: + """ + minimal inventory node to use for other tests + """ + # set up inventory node + + material_2 = cript.Material(name="material 2 " + str(uuid.uuid4()), identifiers=[{"bigsmiles": "my big smiles"}]) + + my_inventory = 
cript.Inventory(name="my inventory name", material=[simple_material_node, material_2]) + + # use my_inventory in another test + return my_inventory + + +@pytest.fixture(scope="function") +def simple_computational_process_node(simple_data_node, complex_ingredient_node) -> None: + """ + simple/minimal computational_process node with only required arguments + """ + my_computational_process = cript.ComputationProcess( + name="my computational process node name", + type="cross_linking", + input_data=[simple_data_node], + ingredient=[complex_ingredient_node], + ) + + return my_computational_process + + +@pytest.fixture(scope="function") +def simplest_computational_process_node(simple_data_node, simple_ingredient_node) -> cript.ComputationProcess: + """ + minimal computational_process node + """ + my_simplest_computational_process = cript.ComputationProcess( + name="my computational process node name", + type="cross_linking", + input_data=[simple_data_node], + ingredient=[simple_ingredient_node], + ) + + return my_simplest_computational_process diff --git a/tests/fixtures/subobjects.py b/tests/fixtures/subobjects.py new file mode 100644 index 000000000..41eff8508 --- /dev/null +++ b/tests/fixtures/subobjects.py @@ -0,0 +1,371 @@ +import copy +import json +import uuid + +import pytest +from util import strip_uid_from_dict + +import cript + + +@pytest.fixture(scope="function") +def complex_parameter_node() -> cript.Parameter: + """ + maximal parameter sub-object that has all possible node attributes + """ + parameter = cript.Parameter(key="update_frequency", value=1000.0, unit="1/second") + + return parameter + + +@pytest.fixture(scope="function") +def complex_parameter_dict() -> dict: + ret_dict = {"node": ["Parameter"], "key": "update_frequency", "value": 1000.0, "unit": "1/second"} + return ret_dict + + +# TODO this fixture should be renamed because it is simple_algorithm_subobject not complex +@pytest.fixture(scope="function") +def simple_algorithm_node() -> cript.Algorithm: + """ + minimal algorithm sub-object + """ + algorithm = cript.Algorithm(key="mc_barostat", type="barostat") + + return algorithm + + +@pytest.fixture(scope="function") +def simple_algorithm_dict() -> dict: + ret_dict = {"node": ["Algorithm"], "key": "mc_barostat", "type": "barostat"} + return ret_dict + + +@pytest.fixture(scope="function") +def complex_reference_node() -> cript.Reference: + title = "Multi-architecture Monte-Carlo (MC) simulation of soft coarse-grained polymeric materials: " + title += "SOft coarse grained Monte-Carlo Acceleration (SOMA)" + + reference = cript.Reference( + type="journal_article", + title=title, + author=["Ludwig Schneider", "Marcus Müller"], + journal="Computer Physics Communications", + publisher="Elsevier", + year=2019, + pages=[463, 476], + doi="10.1016/j.cpc.2018.08.011", + issn="0010-4655", + website="https://www.sciencedirect.com/science/article/pii/S0010465518303072", + ) + return reference + + +@pytest.fixture(scope="function") +def complex_reference_dict() -> dict: + ret_dict = { + "node": ["Reference"], + "type": "journal_article", + "title": "Multi-architecture Monte-Carlo (MC) simulation of soft coarse-grained polymeric materials: SOft coarse grained Monte-Carlo Acceleration (SOMA)", + "author": ["Ludwig Schneider", "Marcus Müller"], + "journal": "Computer Physics Communications", + "publisher": "Elsevier", + "year": 2019, + "pages": [463, 476], + "doi": "10.1016/j.cpc.2018.08.011", + "issn": "0010-4655", + "website": 
"https://www.sciencedirect.com/science/article/pii/S0010465518303072", + } + return ret_dict + + +@pytest.fixture(scope="function") +def complex_citation_node(complex_reference_node) -> cript.Citation: + """ + maximal citation sub-object with all possible node attributes + """ + citation = cript.Citation(type="reference", reference=complex_reference_node) + return citation + + +@pytest.fixture(scope="function") +def complex_citation_dict(complex_reference_dict) -> dict: + ret_dict = {"node": ["Citation"], "reference": complex_reference_dict, "type": "reference"} + return ret_dict + + +@pytest.fixture(scope="function") +def complex_quantity_node() -> cript.Quantity: + quantity = cript.Quantity(key="mass", value=11.2, unit="kg", uncertainty=0.2, uncertainty_type="stdev") + return quantity + + +@pytest.fixture(scope="function") +def complex_quantity_dict() -> dict: + return {"node": ["Quantity"], "key": "mass", "value": 11.2, "unit": "kg", "uncertainty": 0.2, "uncertainty_type": "stdev"} + + +@pytest.fixture(scope="function") +def complex_software_node() -> cript.Software: + software = cript.Software("SOMA", "0.7.0", "https://gitlab.com/InnocentBug/SOMA") + return software + + +@pytest.fixture(scope="function") +def complex_software_dict() -> dict: + ret_dict = {"node": ["Software"], "name": "SOMA", "version": "0.7.0", "source": "https://gitlab.com/InnocentBug/SOMA"} + return ret_dict + + +@pytest.fixture(scope="function") +def complex_property_node(complex_material_node, complex_condition_node, complex_citation_node, complex_data_node, simple_process_node, simple_computation_node): + """ + a maximal property sub-object with all possible fields filled + """ + my_complex_property = cript.Property( + key="modulus_shear", + type="value", + value=5.0, + unit="GPa", + uncertainty=0.1, + uncertainty_type="stdev", + structure="structure", + method="comp", + sample_preparation=copy.deepcopy(simple_process_node), + condition=[complex_condition_node], + computation=[copy.deepcopy(simple_computation_node)], + data=[copy.deepcopy(complex_data_node)], + citation=[complex_citation_node], + notes="my complex_property_node notes", + ) + return my_complex_property + + +@pytest.fixture(scope="function") +def complex_property_dict(complex_material_node, complex_condition_dict, complex_citation_dict, complex_data_node, simple_process_node, simple_computation_node) -> dict: + ret_dict = { + "node": ["Property"], + "key": "modulus_shear", + "type": "value", + "value": 5.0, + "unit": "GPa", + "uncertainty": 0.1, + "uncertainty_type": "stdev", + "structure": "structure", + "sample_preparation": json.loads(simple_process_node.get_json(condense_to_uuid={}).json), + "method": "comp", + "condition": [complex_condition_dict], + "data": [json.loads(complex_data_node.get_json(condense_to_uuid={}).json)], + "citation": [complex_citation_dict], + "computation": [json.loads(simple_computation_node.get_json(condense_to_uuid={}).json)], + "notes": "my complex_property_node notes", + } + return strip_uid_from_dict(ret_dict) + + +@pytest.fixture(scope="function") +def simple_property_node() -> cript.Property: + my_property = cript.Property( + key="modulus_shear", + type="value", + value=5.0, + unit="GPa", + ) + return my_property + + +@pytest.fixture(scope="function") +def simple_property_dict() -> dict: + ret_dict = { + "node": ["Property"], + "key": "modulus_shear", + "type": "value", + "value": 5.0, + "unit": "GPa", + } + return strip_uid_from_dict(ret_dict) + + +@pytest.fixture(scope="function") +def 
complex_condition_node(complex_data_node) -> cript.Condition: + my_complex_condition = cript.Condition( + key="temperature", + type="value", + value=22, + unit="C", + descriptor="room temperature of lab", + uncertainty=5, + uncertainty_type="stdev", + set_id=0, + measurement_id=2, + data=[copy.deepcopy(complex_data_node)], + ) + return my_complex_condition + + +@pytest.fixture(scope="function") +def complex_condition_dict(complex_data_node) -> dict: + ret_dict = { + "node": ["Condition"], + "key": "temperature", + "type": "value", + "descriptor": "room temperature of lab", + "value": 22, + "unit": "C", + "uncertainty": 5, + "uncertainty_type": "stdev", + "set_id": 0, + "measurement_id": 2, + "data": [json.loads(complex_data_node.get_json(condense_to_uuid={}).json)], + } + return ret_dict + + +@pytest.fixture(scope="function") +def complex_ingredient_node(complex_material_node, complex_quantity_node) -> cript.Ingredient: + """ + complex ingredient node with all possible parameters filled + """ + complex_ingredient_node = cript.Ingredient(material=complex_material_node, quantity=[complex_quantity_node], keyword=["catalyst"]) + + return complex_ingredient_node + + +@pytest.fixture(scope="function") +def complex_ingredient_dict(complex_material_node, complex_quantity_dict) -> dict: + ret_dict = {"node": ["Ingredient"], "material": json.loads(complex_material_node.json), "quantity": [complex_quantity_dict], "keyword": ["catalyst"]} + return ret_dict + + +@pytest.fixture(scope="function") +def simple_ingredient_node(simple_material_node, complex_quantity_node) -> cript.Ingredient: + """ + minimal ingredient sub-object used for testing + + Notes + ---- + The main difference is that this uses a simple material with less chance of getting any errors + """ + + simple_material_node.name = f"{simple_material_node.name}_{uuid.uuid4().hex}" + + my_simple_ingredient = cript.Ingredient(material=simple_material_node, quantity=[complex_quantity_node], keyword=["catalyst"]) + + return my_simple_ingredient + + +@pytest.fixture(scope="function") +def complex_equipment_node(complex_condition_node, complex_citation_node) -> cript.Equipment: + """ + maximal equipment node with all possible attributes + """ + my_complex_equipment = cript.Equipment( + key="hot_plate", + description="fancy hot plate for complex_equipment_node", + condition=[complex_condition_node], + citation=[complex_citation_node], + ) + return my_complex_equipment + + +@pytest.fixture(scope="function") +def simple_equipment_node() -> cript.Equipment: + """ + simple and minimal equipment + """ + my_equipment = cript.Equipment(key="burner", description="my simple equipment fixture description") + return my_equipment + + +@pytest.fixture(scope="function") +def complex_equipment_dict(complex_condition_dict, complex_citation_dict) -> dict: + ret_dict = { + "node": ["Equipment"], + "key": "hot_plate", + "description": "fancy hot plate for complex_equipment_node", + "condition": [complex_condition_dict], + "citation": [complex_citation_dict], + } + return ret_dict + + +@pytest.fixture(scope="function") +def complex_computational_forcefield_node(simple_data_node, complex_citation_node) -> cript.ComputationalForcefield: + """ + maximal computational_forcefield sub-object with all possible arguments included in it + """ + my_complex_computational_forcefield_node = cript.ComputationalForcefield( + key="opls_aa", + building_block="atom", + coarse_grained_mapping="atom -> atom", + implicit_solvent="no implicit solvent", + source="local LigParGen 
installation", + description="this is a test forcefield for complex_computational_forcefield_node", + data=[simple_data_node], + citation=[complex_citation_node], + ) + return my_complex_computational_forcefield_node + + +@pytest.fixture(scope="function") +def complex_computational_forcefield_dict(simple_data_node, complex_citation_dict) -> dict: + ret_dict = { + "node": ["ComputationalForcefield"], + "key": "opls_aa", + "building_block": "atom", + "coarse_grained_mapping": "atom -> atom", + "implicit_solvent": "no implicit solvent", + "source": "local LigParGen installation", + "description": "this is a test forcefield for complex_computational_forcefield_node", + "citation": [complex_citation_dict], + "data": [json.loads(simple_data_node.json)], + } + return ret_dict + + +@pytest.fixture(scope="function") +def complex_software_configuration_node(complex_software_node, simple_algorithm_node, complex_citation_node) -> cript.SoftwareConfiguration: + """ + maximal software_configuration sub-object with all possible attributes + """ + my_complex_software_configuration_node = cript.SoftwareConfiguration(software=complex_software_node, algorithm=[simple_algorithm_node], notes="my_complex_software_configuration_node notes", citation=[complex_citation_node]) + return my_complex_software_configuration_node + + +@pytest.fixture(scope="function") +def complex_software_configuration_dict(complex_software_dict, simple_algorithm_dict, complex_citation_dict) -> dict: + ret_dict = { + "node": ["SoftwareConfiguration"], + "software": complex_software_dict, + "algorithm": [simple_algorithm_dict], + "notes": "my_complex_software_configuration_node notes", + "citation": [complex_citation_dict], + } + return ret_dict + + +@pytest.fixture(scope="function") +def simple_software_configuration(complex_software_node) -> cript.SoftwareConfiguration: + """ + minimal software configuration node with only required arguments + """ + my_software_configuration = cript.SoftwareConfiguration(software=complex_software_node) + + return my_software_configuration + + +@pytest.fixture(scope="function") +def simple_computational_forcefield_node(): + """ + simple minimal computational forcefield node + """ + + return cript.ComputationalForcefield(key="amber", building_block="atom") + + +@pytest.fixture(scope="function") +def simple_condition_node() -> cript.Condition: + """ + simple and minimal condition node + """ + return cript.Condition(key="atm", type="max", value=1) diff --git a/tests/fixtures/supporting_nodes.py b/tests/fixtures/supporting_nodes.py new file mode 100644 index 000000000..cca09e2cb --- /dev/null +++ b/tests/fixtures/supporting_nodes.py @@ -0,0 +1,35 @@ +import datetime +import json + +import pytest + +import cript + + +@pytest.fixture(scope="function") +def complex_file_node() -> cript.File: + """ + complex file node with only required arguments + """ + my_file = cript.File(name="my complex file node fixture", source="https://criptapp.org", type="calibration", extension=".csv", data_dictionary="my file's data dictionary") + + return my_file + + +@pytest.fixture(scope="function") +def complex_user_dict() -> dict: + user_dict = {"node": ["User"]} + user_dict["created_at"] = str(datetime.datetime.now()) + user_dict["model_version"] = "1.0.0" + user_dict["picture"] = "/my/picture/path" + user_dict["updated_at"] = str(datetime.datetime.now()) + user_dict["username"] = "testuser" + user_dict["email"] = "test@emai.com" + user_dict["orcid"] = "0000-0002-0000-0000" + return user_dict + + 
+@pytest.fixture(scope="function")
+def complex_user_node(complex_user_dict) -> cript.User:
+ user_node = cript.load_nodes_from_json(json.dumps(complex_user_dict))
+ return user_node
diff --git a/tests/integration_test_helper.py b/tests/integration_test_helper.py
new file mode 100644
index 000000000..e3b34d902
--- /dev/null
+++ b/tests/integration_test_helper.py
@@ -0,0 +1,98 @@
+import json
+
+import pytest
+from conftest import HAS_INTEGRATION_TESTS_ENABLED
+from deepdiff import DeepDiff
+
+import cript
+
+
+def integrate_nodes_helper(cript_api: cript.API, project_node: cript.Project):
+ """
+ integration test between Python SDK and API Client
+ tests both POST and GET
+
+ comparing JSON because it is easier to compare than an object
+
+ exercises the project node through:
+ * node serialization
+ * POST to API
+ * GET from API
+ * deserialization from API JSON to node JSON
+ * compare the JSON of what was sent and what was deserialized from the API
+ * the fields they have in common should be the same
+
+ Parameters
+ ----------
+ cript_api: cript.API
+ pass in the cript_api client that is already available as a fixture
+ project_node: cript.Project
+ the desired project to use for the integration test
+
+ 1. create a project with the desired node to test
+ * pass in the project to this function
+ 1. save the project
+ 1. get the project
+ 1. deserialize the project to a node
+ 1. convert the new node to JSON
+ 1. compare the project node JSON that was sent to the API with the JSON the API returned; the fields they share should be the same
+
+ Notes
+ -----
+ * using the deepdiff library to do the nested JSON comparisons
+ * ignoring the UID fields throughout the JSON because the API changes those when responding
+ """
+
+ if not HAS_INTEGRATION_TESTS_ENABLED:
+ pytest.skip("Integration tests with the API require a real API and Storage token")
+ return
+
+ print("\n\n=================== Project Node ============================")
+ print(project_node.get_json(sort_keys=False, condense_to_uuid={}, indent=2).json)
+ print("==============================================================")
+
+ cript_api.save(project_node)
+
+ # get the project that was just saved
+ my_paginator = cript_api.search(node_type=cript.Project, search_mode=cript.SearchModes.EXACT_NAME, value_to_search=project_node.name)
+
+ # get the project from the paginator
+ my_project_from_api_dict = my_paginator.current_page_results[0]
+
+ print("\n\n================= API Response Node ============================")
+ print(json.dumps(my_project_from_api_dict, sort_keys=False, indent=2))
+ print("==============================================================")
+
+ # Configure keys and blocks to be ignored by deepdiff using exclude_regex_paths
+ # ignores all UIDs within the JSON because those will always be different
+ # and ignores fields that the backend adds to the graph's nodes. 
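+ # For example, r"root(\[.*\])?\['created_at'\]" matches a 'created_at' key at any depth
+ # of the DeepDiff path string (e.g. root['collection'][0]['created_at']), so
+ # server-generated fields never cause the comparison to fail.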
+ exclude_regex_paths = [ + r"root(\[.*\])?\['uid'\]", + r"root\['\w+_count'\]", # All the attributes that end with _count + r"root(\[.*\])?\['\w+_count'\]", # All the attributes that end with _count + r"root(\[.*\])?\['locked'\]", + r"root(\[.*\])?\['admin'\]", + r"root(\[.*\])?\['created_at'\]", + r"root(\[.*\])?\['created_by'\]", + r"root(\[.*\])?\['updated_at'\]", + r"root(\[.*\])?\['updated_by'\]", + r"root(\[.*\])?\['public'\]", + r"root(\[.*\])?\['notes'\]", + r"root(\[.*\])?\['model_version'\]", + ] + # Compare the JSONs + diff = DeepDiff(json.loads(project_node.json), my_project_from_api_dict, exclude_regex_paths=exclude_regex_paths) + # with open("la", "a") as file_handle: + # file_handle.write(str(diff) + "\n") + + print("diff", diff) + # assert not list(diff.get("values_changed", [])) + # assert not list(diff.get("dictionary_item_removed", [])) + # assert not list(diff.get("dictionary_item_added", [])) + + # try to convert api JSON project to node + my_project_from_api = cript.load_nodes_from_json(json.dumps(my_project_from_api_dict)) + print("\n\n=================== Project Node Deserialized =========================") + print(my_project_from_api.get_json(sort_keys=False, condense_to_uuid={}, indent=2).json) + print("==============================================================") + print("\n\n\n######################################## TEST Passed ########################################\n\n\n") diff --git a/tests/nodes/primary_nodes/test_collection.py b/tests/nodes/primary_nodes/test_collection.py new file mode 100644 index 000000000..f4e7bb304 --- /dev/null +++ b/tests/nodes/primary_nodes/test_collection.py @@ -0,0 +1,170 @@ +import copy +import json +import uuid + +from integration_test_helper import integrate_nodes_helper +from util import strip_uid_from_dict + +import cript + + +def test_create_simple_collection(simple_experiment_node) -> None: + """ + test to see a simple collection node can be created with only required arguments + + Notes + ----- + * [Collection](https://pubs.acs.org/doi/suppl/10.1021/acscentsci.3c00011/suppl_file/oc3c00011_si_001.pdf#page=8) + has no required attributes. 
+ * The Python SDK only requires Collections to have `name` + * Since it doesn't make sense to have an empty Collection I added an Experiment to the Collection as well + """ + my_collection_name = "my collection name" + + my_collection = cript.Collection(name=my_collection_name, experiment=[simple_experiment_node]) + + # assertions + assert isinstance(my_collection, cript.Collection) + assert my_collection.name == my_collection_name + assert my_collection.experiment == [simple_experiment_node] + + +def test_create_complex_collection(simple_experiment_node, simple_inventory_node, complex_citation_node) -> None: + """ + test to see if Collection can be made with all the possible optional arguments + """ + my_collection_name = "my complex collection name" + my_cript_doi = "10.1038/1781168a0" + + my_collection = cript.Collection( + name=my_collection_name, + experiment=[simple_experiment_node], + inventory=[simple_inventory_node], + doi=my_cript_doi, + citation=[complex_citation_node], + ) + + # assertions + assert isinstance(my_collection, cript.Collection) + assert my_collection.name == my_collection_name + assert my_collection.experiment == [simple_experiment_node] + assert my_collection.inventory == [simple_inventory_node] + assert my_collection.doi == my_cript_doi + assert my_collection.citation == [complex_citation_node] + + +def test_collection_getters_and_setters(simple_experiment_node, simple_inventory_node, complex_citation_node) -> None: + """ + test that Collection getters and setters are working properly + + 1. create a simple Collection node + 2. use the setter to set the Collection node's attributes + 3. use the getter to get the Collection's attributes + 4. assert that what was set and what was gotten are the same + """ + my_collection = cript.Collection(name="my collection name") + + new_collection_name = "my new collection name" + new_cript_doi = "my new cript doi" + + # set Collection attributes + my_collection.name = new_collection_name + my_collection.experiment = [simple_experiment_node] + my_collection.inventory = [simple_inventory_node] + my_collection.doi = new_cript_doi + my_collection.citation = [complex_citation_node] + + # assert getters and setters are the same + assert isinstance(my_collection, cript.Collection) + assert my_collection.name == new_collection_name + assert my_collection.experiment == [simple_experiment_node] + assert my_collection.inventory == [simple_inventory_node] + assert my_collection.doi == new_cript_doi + assert my_collection.citation == [complex_citation_node] + + +def test_serialize_collection_to_json(complex_user_node) -> None: + """ + test that Collection node can be correctly serialized to JSON + + 1. create a simple Collection node with all required arguments + 1. convert Collection to JSON and back to dict + 1. 
compare expected_collection dict and Collection dict, and they should be the same + + Notes + ----- + * Compare dicts instead of JSON string because dict comparison is more accurate + """ + + expected_collection_dict = { + "node": ["Collection"], + "name": "my collection name", + "experiment": [{"node": ["Experiment"], "name": "my experiment name"}], + "member": [json.loads(copy.deepcopy(complex_user_node).json)], + "admin": [json.loads(complex_user_node.json)], + } + + collection_node = cript.load_nodes_from_json(json.dumps(expected_collection_dict)) + print(collection_node.get_json(indent=2).json) + # assert + ref_dict = json.loads(collection_node.get_json(condense_to_uuid={}).json) + ref_dict = strip_uid_from_dict(ref_dict) + + assert ref_dict == strip_uid_from_dict(expected_collection_dict) + + +def test_uuid(complex_collection_node): + collection_node = complex_collection_node + + # Deep copies should not share uuid (or uids) or urls + collection_node2 = copy.deepcopy(complex_collection_node) + assert collection_node.uuid != collection_node2.uuid + assert collection_node.uid != collection_node2.uid + assert collection_node.url != collection_node2.url + + # Loads from json have the same uuid and url + collection_node3 = cript.load_nodes_from_json(collection_node.get_json(condense_to_uuid={}).json) + assert collection_node3.uuid == collection_node.uuid + assert collection_node3.url == collection_node.url + + +def test_integration_collection(cript_api, simple_project_node, simple_collection_node): + """ + integration test between Python SDK and API Client + + ## Create + 1. Serialize SDK Nodes to JSON + 1. POST to API + 1. GET from API + 1. Deserialize API JSON to SDK Nodes + 1. assert they're both equal + + ## Update + 1. Change JSON + 1. POST/PATCH to API + 1. GET from API + 1. Deserialize API JSON to SDK Nodes + 1. 
assert they're both equal + + Notes + ----- + - [x] Create + - [x] Read + - [x] Update + """ + + # rename project and collection to not bump into duplicate issues + simple_project_node.name = f"test_integration_collection_project_name_{uuid.uuid4().hex}" + simple_collection_node.name = f"test_integration_collection_name_{uuid.uuid4().hex}" + + simple_project_node.collection = [simple_collection_node] + + # ========= test create ========= + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) + + # ========= test update ========= + simple_project_node.collection[0].doi = "my doi UPDATED" + # TODO enable later + # simple_project_node.collection[0].notes = "my collection notes UPDATED" + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) diff --git a/tests/nodes/primary_nodes/test_computation.py b/tests/nodes/primary_nodes/test_computation.py new file mode 100644 index 000000000..802168e03 --- /dev/null +++ b/tests/nodes/primary_nodes/test_computation.py @@ -0,0 +1,130 @@ +import copy +import json +import uuid + +from integration_test_helper import integrate_nodes_helper +from util import strip_uid_from_dict + +import cript + + +def test_create_simple_computation_node() -> None: + """ + test that a simple computation node with all possible arguments can be created successfully + """ + my_computation_type = "analysis" + my_computation_name = "this is my computation name" + + my_computation_node = cript.Computation(name=my_computation_name, type=my_computation_type) + + # assertions + assert isinstance(my_computation_node, cript.Computation) + assert my_computation_node.name == my_computation_name + assert my_computation_node.type == my_computation_type + + +def test_create_complex_computation_node(simple_data_node, complex_software_configuration_node, complex_condition_node, simple_computation_node, complex_citation_node) -> None: + """ + test that a complex computation node with all possible arguments can be created + """ + my_computation_type = "analysis" + + citation = copy.deepcopy(complex_citation_node) + condition = copy.deepcopy(complex_condition_node) + my_computation_node = cript.Computation( + name="my complex computation node name", + type="analysis", + input_data=[simple_data_node], + output_data=[simple_data_node], + software_configuration=[complex_software_configuration_node], + condition=[condition], + prerequisite_computation=simple_computation_node, + citation=[citation], + ) + + # assertions + assert isinstance(my_computation_node, cript.Computation) + assert my_computation_node.type == my_computation_type + assert my_computation_node.input_data == [simple_data_node] + assert my_computation_node.output_data == [simple_data_node] + assert my_computation_node.software_configuration == [complex_software_configuration_node] + assert my_computation_node.condition == [condition] + assert my_computation_node.prerequisite_computation == simple_computation_node + assert my_computation_node.citation == [citation] + + +def test_computation_type_invalid_vocabulary() -> None: + """ + tests that setting the Computation type to an invalid vocabulary word gives the expected error + + Returns + ------- + None + """ + pass + + +def test_computation_getters_and_setters(simple_computation_node, simple_data_node, complex_software_configuration_node, complex_condition_node, complex_citation_node) -> None: + """ + tests that all the getters and setters are working fine + + Notes + ----- + indirectly tests setting the data type to correct 
vocabulary + """ + new_type: str = "data_fit" + new_notes: str = "my computation node note" + + # since the simple_computation_node only has type, the rest of them I can just set and test + simple_computation_node.type = new_type + simple_computation_node.input_data = [simple_data_node] + simple_computation_node.output_data = [simple_data_node] + simple_computation_node.software_configuration = [complex_software_configuration_node] + condition = copy.deepcopy(complex_condition_node) + simple_computation_node.condition = [condition] + citation = copy.deepcopy(complex_citation_node) + simple_computation_node.citation = [citation] + simple_computation_node.notes = new_notes + + # assert getter and setter are same + assert simple_computation_node.type == new_type + assert simple_computation_node.input_data == [simple_data_node] + assert simple_computation_node.output_data == [simple_data_node] + assert simple_computation_node.software_configuration == [complex_software_configuration_node] + assert simple_computation_node.condition == [condition] + assert simple_computation_node.citation == [citation] + assert simple_computation_node.notes == new_notes + + +def test_serialize_computation_to_json(simple_computation_node) -> None: + """ + tests that it can correctly turn the computation node into its equivalent JSON + """ + # TODO test this more vigorously + expected_dict = {"node": ["Computation"], "name": "my computation name", "type": "analysis"} + + # comparing dicts for better test + ref_dict = json.loads(simple_computation_node.json) + ref_dict = strip_uid_from_dict(ref_dict) + assert ref_dict == expected_dict + + +def test_integration_computation(cript_api, simple_project_node, simple_computation_node): + """ + integration test between Python SDK and API Client + + 1. POST to API + 1. GET from API + 1. 
assert they're both equal + """ + # --------- test create --------- + simple_project_node.name = f"test_integration_computation_name_{uuid.uuid4().hex}" + simple_project_node.collection[0].experiment[0].computation = [simple_computation_node] + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) + + # --------- test update --------- + # change simple computation attribute to trigger update + simple_project_node.collection[0].experiment[0].computation[0].type = "data_fit" + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) diff --git a/tests/nodes/primary_nodes/test_computational_process.py b/tests/nodes/primary_nodes/test_computational_process.py new file mode 100644 index 000000000..edc93d842 --- /dev/null +++ b/tests/nodes/primary_nodes/test_computational_process.py @@ -0,0 +1,133 @@ +import json +import uuid + +from integration_test_helper import integrate_nodes_helper +from util import strip_uid_from_dict + +import cript + + +def test_create_simple_computational_process(simple_data_node, complex_ingredient_node) -> None: + """ + create a simple computational_process node with required arguments + """ + + my_computational_process = cript.ComputationProcess( + name="my computational process node name", + type="cross_linking", + input_data=[simple_data_node], + ingredient=[complex_ingredient_node], + ) + + # assertions + assert isinstance(my_computational_process, cript.ComputationProcess) + assert my_computational_process.type == "cross_linking" + assert my_computational_process.input_data == [simple_data_node] + assert my_computational_process.ingredient == [complex_ingredient_node] + + +def test_create_complex_computational_process( + simple_data_node, + complex_ingredient_node, + complex_software_configuration_node, + complex_condition_node, + simple_property_node, + complex_citation_node, +) -> None: + """ + create a complex computational process with all possible arguments + """ + + computational_process_name = "my computational process name" + computational_process_type = "cross_linking" + + ingredient = complex_ingredient_node + data = simple_data_node + my_computational_process = cript.ComputationProcess( + name=computational_process_name, + type=computational_process_type, + input_data=[data], + ingredient=[ingredient], + output_data=[simple_data_node], + software_configuration=[complex_software_configuration_node], + condition=[complex_condition_node], + property=[simple_property_node], + citation=[complex_citation_node], + ) + + # assertions + assert isinstance(my_computational_process, cript.ComputationProcess) + assert my_computational_process.name == computational_process_name + assert my_computational_process.type == computational_process_type + assert my_computational_process.input_data == [data] + assert my_computational_process.ingredient == [ingredient] + assert my_computational_process.output_data == [simple_data_node] + assert my_computational_process.software_configuration == [complex_software_configuration_node] + assert my_computational_process.condition == [complex_condition_node] + assert my_computational_process.property == [simple_property_node] + assert my_computational_process.citation == [complex_citation_node] + + +def test_serialize_computational_process_to_json(simple_computational_process_node) -> None: + """ + tests that a computational process node can be correctly serialized to JSON + """ + expected_dict: dict = { + "node": ["ComputationProcess"], + "name": "my computational process node 
name", + "type": "cross_linking", + "input_data": [ + { + "node": ["Data"], + "name": "my data name", + "type": "afm_amp", + "file": [{"node": ["File"], "name": "my complex file node fixture", "source": "https://criptapp.org", "type": "calibration", "extension": ".csv", "data_dictionary": "my file's data dictionary"}], + } + ], + "ingredient": [ + { + "node": ["Ingredient"], + "material": {}, + "quantity": [{"node": ["Quantity"], "key": "mass", "value": 11.2, "unit": "kg", "uncertainty": 0.2, "uncertainty_type": "stdev"}], + "keyword": ["catalyst"], + } + ], + } + + ref_dict = json.loads(simple_computational_process_node.json) + ref_dict = strip_uid_from_dict(ref_dict) + assert ref_dict == expected_dict + + +def test_integration_computational_process(cript_api, simple_project_node, simple_collection_node, simple_experiment_node, simplest_computational_process_node, simple_material_node, simple_data_node): + """ + integration test between Python SDK and API Client + + 1. POST to API + 1. GET from API + 1. assert they're both equal + """ + # ========= test create ========= + # renaming to avoid duplicate node errors + simple_project_node.name = f"test_integration_computation_process_name_{uuid.uuid4().hex}" + + simple_material_node.name = f"{simple_material_node.name}_{uuid.uuid4().hex}" + + simple_project_node.material = [simple_material_node] + + simple_project_node.collection = [simple_collection_node] + + simple_project_node.collection[0].experiment = [simple_experiment_node] + + # fixing orphanedDataNodeError + simple_project_node.collection[0].experiment[0].data = [simple_data_node] + + simple_project_node.collection[0].experiment[0].computation_process = [simplest_computational_process_node] + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) + + # ========= test update ========= + # change computational_process to trigger update + simple_project_node.collection[0].experiment[0].computation_process[0].type = "DPD" + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) diff --git a/tests/nodes/primary_nodes/test_data.py b/tests/nodes/primary_nodes/test_data.py new file mode 100644 index 000000000..90afda6e2 --- /dev/null +++ b/tests/nodes/primary_nodes/test_data.py @@ -0,0 +1,167 @@ +import copy +import json +import uuid + +from integration_test_helper import integrate_nodes_helper +from util import strip_uid_from_dict + +import cript + + +def test_create_simple_data_node(complex_file_node) -> None: + """ + create a simple data node with only required arguments + """ + my_data_type = "afm_amp" + + my_data = cript.Data(name="my data name", type=my_data_type, file=[complex_file_node]) + + # assertions + assert isinstance(my_data, cript.Data) + assert my_data.type == my_data_type + assert my_data.file == [complex_file_node] + + +def test_create_complex_data_node( + complex_file_node, + simple_process_node, + simple_computation_node, + simple_computational_process_node, + simple_material_node, + complex_citation_node, +) -> None: + """ + create a complex data node with all possible arguments + """ + + file_node = copy.deepcopy(complex_file_node) + my_complex_data = cript.Data( + name="my complex data node name", + type="afm_amp", + file=[file_node], + sample_preparation=simple_process_node, + computation=[simple_computation_node], + computation_process=[simple_computational_process_node], + material=[simple_material_node], + process=[simple_process_node], + # citation=[complex_citation_node], + ) + + # assertions + assert 
isinstance(my_complex_data, cript.Data) + assert my_complex_data.type == "afm_amp" + assert my_complex_data.file == [file_node] + assert my_complex_data.sample_preparation == simple_process_node + assert my_complex_data.computation == [simple_computation_node] + assert my_complex_data.computation_process == [simple_computational_process_node] + assert my_complex_data.material == [simple_material_node] + assert my_complex_data.process == [simple_process_node] + # assert my_complex_data.citation == [complex_citation_node] + + +def test_data_getters_and_setters( + simple_data_node, + complex_file_node, + simple_process_node, + simple_computation_node, + simple_computational_process_node, + simple_material_node, + complex_citation_node, +) -> None: + """ + tests that all the getters and setters are working fine + + Notes + ----- + indirectly tests setting the data type to correct vocabulary + + Returns + ------- + None + """ + my_data_type = "afm_height" + + my_new_files = [ + complex_file_node, + cript.File( + name="my data file node", + source="https://bing.com", + type="computation_config", + extension=".pdf", + data_dictionary="my second file data dictionary", + ), + ] + + # use setters + comp_process = simple_computational_process_node + simple_data_node.type = my_data_type + simple_data_node.file = my_new_files + simple_data_node.sample_preparation = simple_process_node + simple_data_node.computation = [simple_computation_node] + simple_data_node.computation_process = [comp_process] + simple_data_node.material = [simple_material_node] + simple_data_node.process = [simple_process_node] + simple_data_node.citation = [complex_citation_node] + + # assertions check getters and setters + assert simple_data_node.type == my_data_type + assert simple_data_node.file == my_new_files + assert simple_data_node.sample_preparation == simple_process_node + assert simple_data_node.computation == [simple_computation_node] + assert simple_data_node.computation_process == [comp_process] + assert simple_data_node.material == [simple_material_node] + assert simple_data_node.process == [simple_process_node] + assert simple_data_node.citation == [complex_citation_node] + + +def test_serialize_data_to_json(simple_data_node) -> None: + """ + tests that it can correctly turn the data node into its equivalent JSON + """ + + # TODO should Base attributes should be in here too like notes, public, model version, etc? + expected_data_dict = { + "node": ["Data"], + "type": "afm_amp", + "name": "my data name", + "file": [ + { + "node": ["File"], + "name": "my complex file node fixture", + "data_dictionary": "my file's data dictionary", + "extension": ".csv", + "source": "https://criptapp.org", + "type": "calibration", + } + ], + } + + ref_dict = json.loads(simple_data_node.json) + ref_dict = strip_uid_from_dict(ref_dict) + assert ref_dict == expected_data_dict + + +def test_integration_data(cript_api, simple_project_node, simple_data_node): + """ + integration test between Python SDK and API Client + + 1. POST to API + 1. GET from API + 1. 
assert they're both equal + + Notes + ----- + indirectly tests complex file as well because every data node must have a file node + """ + # ========= test create ========= + simple_project_node.name = f"test_integration_project_name_{uuid.uuid4().hex}" + + simple_project_node.collection[0].experiment[0].data = [simple_data_node] + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) + + # ========= test update ========= + # update a simple attribute of data to trigger update + simple_project_node.collection[0].experiment[0].data[0].type = "afm_height" + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) diff --git a/tests/nodes/primary_nodes/test_experiment.py b/tests/nodes/primary_nodes/test_experiment.py new file mode 100644 index 000000000..4f6435f6f --- /dev/null +++ b/tests/nodes/primary_nodes/test_experiment.py @@ -0,0 +1,214 @@ +import copy +import json +import uuid + +from integration_test_helper import integrate_nodes_helper +from util import strip_uid_from_dict + +import cript + + +def test_create_simple_experiment() -> None: + """ + test just to see if a minimal experiment can be made without any issues + """ + experiment_name = "my experiment name" + + my_experiment = cript.Experiment(name=experiment_name) + + # assertions + assert isinstance(my_experiment, cript.Experiment) + + +def test_create_complex_experiment(simple_process_node, simple_computation_node, simple_computational_process_node, simple_data_node, complex_citation_node) -> None: + """ + test to see if Collection can be made with all the possible options filled + """ + experiment_name = "my experiment name" + experiment_funders = ["National Science Foundation", "IRIS", "NIST"] + + citation = copy.deepcopy(complex_citation_node) + my_experiment = cript.Experiment( + name=experiment_name, + process=[simple_process_node], + computation=[simple_computation_node], + computation_process=[simple_computational_process_node], + data=[simple_data_node], + funding=experiment_funders, + citation=[citation], + ) + + # assertions + assert isinstance(my_experiment, cript.Experiment) + assert my_experiment.name == experiment_name + assert my_experiment.process == [simple_process_node] + assert my_experiment.computation == [simple_computation_node] + assert my_experiment.computation_process == [simple_computational_process_node] + assert my_experiment.data == [simple_data_node] + assert my_experiment.funding == experiment_funders + assert my_experiment.citation[-1] == citation + + +def test_all_getters_and_setters_for_experiment( + simple_experiment_node, + simple_process_node, + simple_computation_node, + simple_computational_process_node, + simple_data_node, + complex_citation_node, +) -> None: + """ + tests all the getters and setters for the experiment + + 1. create a node with only the required arguments + 2. set all the properties for the experiment + 3. get all the properties for the experiment + 4. 
assert that what you set in the setter and the getter are equal to each other + """ + experiment_name = "my new experiment name" + experiment_funders = ["MIT", "European Research Council (ERC)", "Japan Society for the Promotion of Science (JSPS)"] + + # set experiment properties + simple_experiment_node.name = experiment_name + simple_experiment_node.process = [simple_process_node] + simple_experiment_node.computation = [simple_computation_node] + simple_experiment_node.computation_process = [simple_computational_process_node] + simple_experiment_node.data = [simple_data_node] + simple_experiment_node.funding = experiment_funders + citation = copy.deepcopy(complex_citation_node) + simple_experiment_node.citation = [citation] + + # assert getters and setters are equal + assert isinstance(simple_experiment_node, cript.Experiment) + assert simple_experiment_node.name == experiment_name + assert simple_experiment_node.process == [simple_process_node] + assert simple_experiment_node.computation == [simple_computation_node] + assert simple_experiment_node.computation_process == [simple_computational_process_node] + assert simple_experiment_node.data == [simple_data_node] + assert simple_experiment_node.funding == experiment_funders + assert simple_experiment_node.citation[-1] == citation + + +def test_experiment_json(simple_process_node, simple_computation_node, simple_computational_process_node, simple_data_node, complex_citation_node, complex_citation_dict) -> None: + """ + tests that the experiment JSON is functioning correctly + + 1. create an experiment with all possible attributes + 2. convert the experiment into a JSON + 3. assert that the JSON is that it produces is equal to what you expected + + Notes + ----- + indirectly tests that the notes attribute also works within the experiment node. 
+ All nodes inherit from the base node, so if the base node attribute is working in this test + there is a good chance that it will work correctly for all other nodes that inherit from it as well + """ + experiment_name = "my experiment name" + experiment_funders = ["National Science Foundation", "IRIS", "NIST"] + + citation = copy.deepcopy(complex_citation_node) + my_experiment = cript.Experiment( + name=experiment_name, + process=[simple_process_node], + computation=[simple_computation_node], + computation_process=[simple_computational_process_node], + data=[simple_data_node], + funding=experiment_funders, + citation=[citation], + ) + + # adding notes to test base node attributes + my_experiment.notes = "these are all of my notes for this experiment" + + # TODO this is unmaintainable and we should figure out a strategy for a better way + expected_experiment_dict = { + "node": ["Experiment"], + "name": "my experiment name", + "notes": "these are all of my notes for this experiment", + "process": [{"node": ["Process"], "name": "my process name", "type": "affinity_pure"}], + "computation": [{"node": ["Computation"], "name": "my computation name", "type": "analysis"}], + "computation_process": [ + { + "node": ["ComputationProcess"], + "name": "my computational process node name", + "type": "cross_linking", + "input_data": [ + { + "node": ["Data"], + "name": "my data name", + "type": "afm_amp", + "file": [{"node": ["File"], "name": "my complex file node fixture", "source": "https://criptapp.org", "type": "calibration", "extension": ".csv", "data_dictionary": "my file's data dictionary"}], + } + ], + "ingredient": [ + { + "node": ["Ingredient"], + "material": {}, + "quantity": [{"node": ["Quantity"], "key": "mass", "value": 11.2, "unit": "kg", "uncertainty": 0.2, "uncertainty_type": "stdev"}], + "keyword": ["catalyst"], + } + ], + } + ], + "data": [{}], + "funding": ["National Science Foundation", "IRIS", "NIST"], + "citation": [ + { + "node": ["Citation"], + "type": "reference", + "reference": { + "node": ["Reference"], + "type": "journal_article", + "title": "Multi-architecture Monte-Carlo (MC) simulation of soft coarse-grained polymeric materials: SOft coarse grained Monte-Carlo Acceleration (SOMA)", + "author": ["Ludwig Schneider", "Marcus M\u00fcller"], + "journal": "Computer Physics Communications", + "publisher": "Elsevier", + "year": 2019, + "pages": [463, 476], + "doi": "10.1016/j.cpc.2018.08.011", + "issn": "0010-4655", + "website": "https://www.sciencedirect.com/science/article/pii/S0010465518303072", + }, + } + ], + } + + ref_dict = json.loads(my_experiment.json) + ref_dict = strip_uid_from_dict(ref_dict) + + assert len(ref_dict) == len(expected_experiment_dict) + assert ref_dict == expected_experiment_dict + + +# -------- Integration Tests -------- +def test_integration_experiment(cript_api, simple_project_node, simple_collection_node, simple_experiment_node): + """ + integration test between Python SDK and API Client + + tests both POST and GET + + 1. create a project + 1. create a collection + 1. add collection to project + 1. save the project + 1. get the project + 1. deserialize the project to node + 1. convert the new node to JSON + 1. 
compare the project node JSON that was sent to API and the node the API gave, have the same JSON + + Notes + ----- + comparing JSON because it is easier to compare than an object + """ + # ========= test create ========= + # rename project and collection to not bump into duplicate issues + simple_project_node.name = f"test_integration_experiment_project_name_{uuid.uuid4().hex}" + simple_project_node.collection = [simple_collection_node] + simple_project_node.collection[0].experiment = [simple_experiment_node] + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) + + # ========= test update ========= + # update simple attribute to trigger update + simple_project_node.collection[0].experiment[0].funding = ["update1", "update2", "update3"] + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) diff --git a/tests/nodes/primary_nodes/test_inventory.py b/tests/nodes/primary_nodes/test_inventory.py new file mode 100644 index 000000000..2eaa36df4 --- /dev/null +++ b/tests/nodes/primary_nodes/test_inventory.py @@ -0,0 +1,72 @@ +import json +import uuid + +from integration_test_helper import integrate_nodes_helper +from util import strip_uid_from_dict + +import cript + + +def test_get_and_set_inventory(simple_inventory_node) -> None: + """ + tests that a material list for the inventory node can be gotten and set correctly + + 1. create new material node + 2. set the material's list + 3. get the material's list + 1. originally in simple_inventory it has 2 materials, but after the setter it should have only 1 + 4. assert that the materials list set and the one gotten are the same + """ + # create new materials + material_1 = cript.Material(name="new material 1", identifiers=[{"names": ["new material 1 alternative name"]}]) + + # set inventory materials + simple_inventory_node.material = [material_1] + + # get and check inventory materials + assert isinstance(simple_inventory_node, cript.Inventory) + assert simple_inventory_node.material[-1] == material_1 + + +def test_inventory_serialization(simple_inventory_node, simple_material_dict) -> None: + """ + test that the inventory is correctly serializing into JSON + + 1. converts inventory json string to dict + 2. strips the UID from all the nodes within that dict + 3. compares the expected_dict written to what JSON deserializes + """ + expected_dict = {"node": ["Inventory"], "name": "my inventory name", "material": [simple_material_dict, {"node": ["Material"], "name": "material 2", "bigsmiles": "my big smiles"}]} + + # TODO this needs better testing + # force not condensing to edge uuid during json serialization + deserialized_inventory: dict = json.loads(simple_inventory_node.get_json(condense_to_uuid={}).json) + deserialized_inventory = strip_uid_from_dict(deserialized_inventory) + deserialized_inventory["material"][0]["name"] = "my material" + deserialized_inventory["material"][1]["name"] = "material 2" + + assert expected_dict == deserialized_inventory + + +def test_integration_inventory(cript_api, simple_project_node, simple_inventory_node): + """ + integration test between Python SDK and API Client + + 1. POST to API + 1. GET from API + 1. 
assert they're both equal + """ + # ========= test create ========= + # putting UUID in name so it doesn't bump into uniqueness errors + simple_project_node.name = f"project_name_{uuid.uuid4().hex}" + simple_project_node.collection[0].name = f"collection_name_{uuid.uuid4().hex}" + simple_inventory_node.name = f"inventory_name_{uuid.uuid4().hex}" + + simple_project_node.collection[0].inventory = [simple_inventory_node] + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) + + # ========= test update ========= + simple_project_node.collection[0].inventory[0].notes = "inventory notes UPDATED" + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) diff --git a/tests/nodes/primary_nodes/test_material.py b/tests/nodes/primary_nodes/test_material.py new file mode 100644 index 000000000..b552919db --- /dev/null +++ b/tests/nodes/primary_nodes/test_material.py @@ -0,0 +1,134 @@ +import json +import uuid + +from integration_test_helper import integrate_nodes_helper +from util import strip_uid_from_dict + +import cript + + +def test_create_complex_material(simple_material_node, simple_computational_forcefield_node, simple_process_node) -> None: + """ + tests that a simple material can be created with only the required arguments + """ + + material_name = "my material name" + identifiers = [{"bigsmiles": "1234"}, {"bigsmiles": "4567"}] + keyword = ["acetylene"] + + component = [simple_material_node] + forcefield = [simple_computational_forcefield_node] + + my_property = [cript.Property(key="modulus_shear", type="min", value=1.23, unit="gram")] + + my_material = cript.Material(name=material_name, identifiers=identifiers, keyword=keyword, component=component, process=simple_process_node, property=my_property, computational_forcefield=forcefield) + + assert isinstance(my_material, cript.Material) + assert my_material.name == material_name + assert my_material.identifiers == identifiers + assert my_material.keyword == keyword + assert my_material.component == component + assert my_material.process == simple_process_node + assert my_material.property == my_property + assert my_material.computational_forcefield == forcefield + + +def test_invalid_material_keywords() -> None: + """ + tries to create a material with invalid keywords and expects to get an Exception + """ + # with pytest.raises(InvalidVocabulary): + pass + + +def test_all_getters_and_setters(simple_material_node, simple_property_node, simple_process_node, simple_computational_forcefield_node) -> None: + """ + tests the getters and setters for the simple material object + + 1. sets every possible attribute for the simple_material object + 2. gets every possible attribute for the simple_material object + 3. 
asserts that what was set and what was gotten are the same + """ + # new attributes + new_name = "new material name" + + new_identifiers = [{"bigsmiles": "6789"}] + + new_parent_material = cript.Material( + name="my parent material", + identifiers=[ + {"bigsmiles": "9876"}, + ], + ) + + new_material_keywords = ["acetylene"] + + new_components = [ + cript.Material( + name="my component material 1", + identifiers=[ + {"bigsmiles": "654321"}, + ], + ), + ] + + # set all attributes for Material node + simple_material_node.name = new_name + simple_material_node.identifiers = new_identifiers + simple_material_node.property = [simple_property_node] + simple_material_node.parent_material = new_parent_material + simple_material_node.computational_forcefield = simple_computational_forcefield_node + simple_material_node.keyword = new_material_keywords + simple_material_node.component = new_components + + # get all attributes and assert that they are equal to the setter + assert simple_material_node.name == new_name + assert simple_material_node.identifiers == new_identifiers + assert simple_material_node.property == [simple_property_node] + assert simple_material_node.parent_material == new_parent_material + assert simple_material_node.computational_forcefield == simple_computational_forcefield_node + assert simple_material_node.keyword == new_material_keywords + assert simple_material_node.component == new_components + + +def test_serialize_material_to_json(complex_material_dict, complex_material_node) -> None: + """ + tests that it can correctly turn the material node into its equivalent JSON + """ + # the JSON that the material should serialize to + + # compare dicts because that is more accurate + ref_dict = json.loads(complex_material_node.get_json(condense_to_uuid={}).json) + ref_dict = strip_uid_from_dict(ref_dict) + + assert ref_dict == complex_material_dict + + +def test_integration_material(cript_api, simple_project_node, simple_material_node) -> None: + """ + integration test between Python SDK and API Client + + tests both POST and GET + + 1. create a project + 1. create a material + 1. add a material to project + 1. save the project + 1. get the project + 1. deserialize the project + 1. 
compare the project node that was sent to API and the one API gave, that they are the same + """ + # ========= test create ========= + # creating unique name to not bump into unique errors + simple_project_node.name = f"test_integration_project_name_{uuid.uuid4().hex}" + simple_material_node.name = f"test_integration_material_name_{uuid.uuid4().hex}" + + simple_project_node.material = [simple_material_node] + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) + + # ========= test update ========= + # update material attribute to trigger update + simple_project_node.material[0].identifiers = [{"bigsmiles": "my bigsmiles UPDATED"}] + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) diff --git a/tests/nodes/primary_nodes/test_process.py b/tests/nodes/primary_nodes/test_process.py new file mode 100644 index 000000000..183b1b9cf --- /dev/null +++ b/tests/nodes/primary_nodes/test_process.py @@ -0,0 +1,192 @@ +import copy +import json +import uuid + +from integration_test_helper import integrate_nodes_helper +from util import strip_uid_from_dict + +import cript + + +def test_simple_process() -> None: + """ + tests that a simple process node can be correctly created + """ + + # process fields + my_process_type = "affinity_pure" + my_process_description = "my simple material description" + my_process_keywords = ["anionic"] + + # create process node + my_process = cript.Process(name="my process name", type=my_process_type, description=my_process_description, keyword=my_process_keywords) + + # assertions + assert isinstance(my_process, cript.Process) + assert my_process.type == my_process_type + assert my_process.description == my_process_description + assert my_process.keyword == my_process_keywords + + +def test_complex_process_node(complex_ingredient_node, simple_equipment_node, complex_citation_node, simple_property_node, simple_condition_node, simple_material_node, simple_process_node, complex_equipment_node, complex_condition_node) -> None: + """ + create a process node with all possible arguments + + Notes + ----- + * indirectly tests the vocabulary as well, as it gives it valid vocabulary + """ + # TODO clean up this test and use fixtures from conftest.py + + my_process_name = "my complex process node name" + my_process_type = "affinity_pure" + my_process_description = "my simple material description" + + process_waste = [ + cript.Material(name="my process waste material 1", identifiers=[{"bigsmiles": "process waste bigsmiles"}]), + ] + + my_process_keywords = [ + "anionic", + "annealing_sol", + ] + + # create complex process + citation = copy.deepcopy(complex_citation_node) + prop = cript.Property("n_neighbor", "value", 2.0, None) + + my_complex_process = cript.Process( + name=my_process_name, + type=my_process_type, + ingredient=[complex_ingredient_node], + description=my_process_description, + equipment=[complex_equipment_node], + product=[simple_material_node], + waste=process_waste, + prerequisite_process=[simple_process_node], + condition=[complex_condition_node], + property=[prop], + keyword=my_process_keywords, + citation=[citation], + ) + # assertions + assert my_complex_process.type == my_process_type + assert my_complex_process.ingredient == [complex_ingredient_node] + assert my_complex_process.description == my_process_description + assert my_complex_process.equipment == [complex_equipment_node] + assert my_complex_process.product == [simple_material_node] + assert my_complex_process.waste == process_waste + assert 
my_complex_process.prerequisite_process[-1] == simple_process_node + assert my_complex_process.condition[-1] == complex_condition_node + assert my_complex_process.property[-1] == prop + assert my_complex_process.keyword[-1] == my_process_keywords[-1] + assert my_complex_process.citation[-1] == citation + + +def test_process_getters_and_setters( + simple_process_node, + complex_ingredient_node, + complex_equipment_node, + simple_material_node, + complex_condition_node, + simple_property_node, + complex_citation_node, +) -> None: + """ + test getters and setters and be sure they are working correctly + + 1. set simple_process_node attributes to something new + 2. get all attributes and check that they have been set correctly + + Notes + ----- + indirectly tests setting the data type to correct vocabulary + """ + new_process_type = "blow_molding" + new_process_description = "my new process description" + new_process_keywords = "annealing_sol" + + # test setters + simple_process_node.type = new_process_type + simple_process_node.ingredient = [complex_ingredient_node] + simple_process_node.description = new_process_description + equipment = complex_equipment_node + simple_process_node.equipment = [equipment] + product = simple_material_node + simple_process_node.product = [product] + simple_process_node.waste = [simple_material_node] + simple_process_node.prerequisite_process = [simple_process_node] + simple_process_node.condition = [complex_condition_node] + prop = cript.Property("n_neighbor", "value", 2.0, None) + simple_process_node.property += [prop] + simple_process_node.keyword = [new_process_keywords] + citation = copy.deepcopy(complex_citation_node) + simple_process_node.citation = [citation] + + # test getters + assert simple_process_node.type == new_process_type + assert simple_process_node.ingredient == [complex_ingredient_node] + assert simple_process_node.description == new_process_description + assert simple_process_node.equipment[-1] == equipment + assert simple_process_node.product[-1] == product + assert simple_process_node.waste == [simple_material_node] + assert simple_process_node.prerequisite_process == [simple_process_node] + assert simple_process_node.condition == [complex_condition_node] + assert simple_process_node.property[-1] == prop + assert simple_process_node.keyword == [new_process_keywords] + assert simple_process_node.citation[-1] == citation + + +def test_serialize_process_to_json(simple_process_node) -> None: + """ + test serializing process node to JSON + """ + expected_process_dict = {"node": ["Process"], "name": "my process name", "type": "affinity_pure"} + + # comparing dicts because they are more accurate + ref_dict = json.loads(simple_process_node.json) + ref_dict = strip_uid_from_dict(ref_dict) + assert ref_dict == expected_process_dict + + +def test_integration_simple_process(cript_api, simple_project_node, simple_process_node): + """ + integration test between Python SDK and API Client + + 1. POST to API + 1. GET from API + 1. assert JSON sent and JSON received are the same + """ + simple_project_node.name = f"test_integration_process_name_{uuid.uuid4().hex}" + + simple_project_node.collection[0].experiment[0].process = [simple_process_node] + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) + + +def test_integration_complex_process(cript_api, simple_project_node, simple_process_node, simple_material_node): + """ + integration test between Python SDK and API Client + + 1. POST to API + 1. GET from API + 1. 
assert JSON sent and JSON received are the same + """ + # ========= test create ========= + simple_project_node.name = f"test_integration_process_name_{uuid.uuid4().hex}" + + # rename material to not get duplicate error + simple_material_node.name = f"{simple_material_node.name}_{uuid.uuid4().hex}" + + # add material to the project to not get OrphanedNodeError + simple_project_node.material += [simple_material_node] + + simple_project_node.collection[0].experiment[0].process = [simple_process_node] + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) + + # ========= test update ========= + # change simple attribute to trigger update + simple_project_node.collection[0].experiment[0].process[0].description = "process description UPDATED" + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) diff --git a/tests/nodes/primary_nodes/test_project.py b/tests/nodes/primary_nodes/test_project.py new file mode 100644 index 000000000..65d2e63a1 --- /dev/null +++ b/tests/nodes/primary_nodes/test_project.py @@ -0,0 +1,79 @@ +import json +import uuid + +from integration_test_helper import integrate_nodes_helper +from util import strip_uid_from_dict + +import cript + + +def test_create_simple_project(simple_collection_node) -> None: + """ + test that a project node with only required arguments can be created + """ + my_project_name = "my Project name" + + my_project = cript.Project(name=my_project_name, collection=[simple_collection_node]) + + # assertions + assert isinstance(my_project, cript.Project) + assert my_project.name == my_project_name + assert my_project.collection == [simple_collection_node] + + +def test_project_getters_and_setters(simple_project_node, simple_collection_node, complex_collection_node, simple_material_node) -> None: + """ + tests that a Project node's getters and setters are working as expected + + 1. use a simple project node + 2. set all of its attributes to something new + 3. get all of its attributes + 4. what was set and what was gotten should be equivalent + """ + new_project_name = "my new project name" + + # set attributes + simple_project_node.name = new_project_name + simple_project_node.collection = [complex_collection_node] + simple_project_node.material = [simple_material_node] + + # get attributes and assert that they are the same + assert simple_project_node.name == new_project_name + assert simple_project_node.collection == [complex_collection_node] + assert simple_project_node.material == [simple_material_node] + + +def test_serialize_project_to_json(complex_project_node, complex_project_dict) -> None: + """ + tests that a Project node can be correctly converted to JSON + """ + expected_dict = complex_project_dict + + # Since we condense those to UUID we remove them from the expected dict. + expected_dict["admin"] = [{}] + expected_dict["member"] = [{}] + + # comparing dicts instead of JSON strings because dict comparison is more accurate + serialized_project: dict = json.loads(complex_project_node.get_json(condense_to_uuid={}).json) + serialized_project = strip_uid_from_dict(serialized_project) + + assert serialized_project == strip_uid_from_dict(expected_dict) + + +def test_integration_project(cript_api, simple_project_node): + """ + integration test between Python SDK and API Client + + 1. POST to API + 1. GET from API + 1.
assert they're both equal + """ + # ========= test create ========= + simple_project_node.name = f"test_integration_project_name_{uuid.uuid4().hex}" + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) + + # ========= test update ========= + simple_project_node.notes = "project notes UPDATED" + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) diff --git a/tests/nodes/primary_nodes/test_reference.py b/tests/nodes/primary_nodes/test_reference.py new file mode 100644 index 000000000..05374e998 --- /dev/null +++ b/tests/nodes/primary_nodes/test_reference.py @@ -0,0 +1,196 @@ +import json +import uuid +import warnings + +from integration_test_helper import integrate_nodes_helper +from util import strip_uid_from_dict + +import cript + + +def test_create_simple_reference() -> None: + """ + tests to see if a simple reference node with only minimal arguments can be successfully created + """ + my_reference_type = "journal_article" + my_reference_title = "'Living' Polymers" + + my_reference = cript.Reference(type=my_reference_type, title=my_reference_title) + + assert isinstance(my_reference, cript.Reference) + assert my_reference.type == my_reference_type + assert my_reference.title == my_reference_title + + +def test_complex_reference() -> None: + """ + tests that a complex reference node with all optional parameters can be made + """ + + # reference attributes + reference_type = "journal_article" + title = "'Living' Polymers" + authors = ["Dylan J. Walsh", "Bradley D. Olsen"] + journal = "Nature" + publisher = "Springer" + year = 2019 + volume = 3 + issue = 5 + pages = [123, 456, 789] + doi = "10.1038/1781168a0" + issn = "1476-4687" + arxiv_id = "1501" + pmid = 12345678 + website = "https://criptapp.org" + + # create complex reference node + my_reference = cript.Reference( + type=reference_type, + title=title, + author=authors, + journal=journal, + publisher=publisher, + year=year, + volume=volume, + issue=issue, + pages=pages, + doi=doi, + issn=issn, + arxiv_id=arxiv_id, + pmid=pmid, + website=website, + ) + + # assertions + assert isinstance(my_reference, cript.Reference) + assert my_reference.type == reference_type + assert my_reference.title == title + assert my_reference.author == authors + assert my_reference.journal == journal + assert my_reference.publisher == publisher + assert my_reference.year == year + assert my_reference.volume == volume + assert my_reference.issue == issue + assert my_reference.pages == pages + assert my_reference.doi == doi + assert my_reference.issn == issn + assert my_reference.arxiv_id == arxiv_id + assert my_reference.pmid == pmid + assert my_reference.website == website + + +def test_getters_and_setters_reference(complex_reference_node) -> None: + """ + tests that the getters and setters of the complex reference node are working correctly + """ + + # new attributes for the setter + reference_type = "journal_article" + title = "my title" + authors = ["Ludwig Schneider"] + journal = "my journal" + publisher = "my publisher" + year = 2023 + volume = 1 + issue = 2 + pages = [123, 456] + doi = "100.1038/1781168a0" + issn = "1456-4687" + arxiv_id = "1501" + pmid = 12345678 + website = "https://criptapp.org" + + # set reference attributes + complex_reference_node.type = reference_type + complex_reference_node.title = title + complex_reference_node.author = authors + complex_reference_node.journal = journal + complex_reference_node.publisher = publisher + complex_reference_node.year = year + complex_reference_node.volume = volume + complex_reference_node.issue = issue + complex_reference_node.pages = pages + complex_reference_node.doi = doi + complex_reference_node.issn = issn + complex_reference_node.arxiv_id = arxiv_id + complex_reference_node.pmid = pmid + complex_reference_node.website = website + + # assertions: test getter and setter + assert isinstance(complex_reference_node, cript.Reference) + assert complex_reference_node.type == reference_type + assert complex_reference_node.title == title + assert complex_reference_node.author == authors + assert complex_reference_node.journal == journal + assert complex_reference_node.publisher == publisher + assert complex_reference_node.year == year + assert complex_reference_node.volume == volume + assert complex_reference_node.issue == issue + assert complex_reference_node.pages == pages + assert complex_reference_node.doi == doi + assert complex_reference_node.issn == issn + assert complex_reference_node.arxiv_id == arxiv_id + assert complex_reference_node.pmid == pmid + assert complex_reference_node.website == website + + +def test_reference_vocabulary() -> None: + """ + tests that a reference node type with valid CRIPT controlled vocabulary runs successfully + and invalid reference type gives the correct errors + """ + pass + + +def test_reference_conditional_attributes() -> None: + """ + tests that conditional attributes (DOI and ISSN) are validated correctly + and that an error is correctly raised when they are needed but not provided + """ + pass + + +def test_serialize_reference_to_json(complex_reference_node, complex_reference_dict) -> None: + """ + tests that it can correctly turn the reference node into its equivalent JSON + """ + + # convert reference to json and then to dict for better comparison + reference_dict = json.loads(complex_reference_node.json) + reference_dict = strip_uid_from_dict(reference_dict) + + assert reference_dict == complex_reference_dict + + +def test_integration_reference(cript_api, simple_project_node, complex_citation_node, complex_reference_node): + """ + integration test between Python SDK and API Client + + 1. POST to API + 1. GET from API + 1.
assert they're both equal + + Notes + ----- + indirectly tests citation node along with reference node + """ + # ========= test create ========= + simple_project_node.name = f"test_integration_reference_name_{uuid.uuid4().hex}" + + simple_project_node.collection[0].citation = [complex_citation_node] + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) + + # TODO deserialization with citation in collection is wrong + # raise Exception("Citation is missing from collection node from API") + warnings.warn("Uncomment the Reference integration test Exception and check the API response has citation on collection") + + # ========= test update ========= + # change simple attribute to trigger update + # TODO can enable this later + # complex_reference_node.type = "book" + simple_project_node.collection[0].citation[0].reference.title = "reference title UPDATED" + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) diff --git a/tests/nodes/subobjects/test_algorithm.py b/tests/nodes/subobjects/test_algorithm.py new file mode 100644 index 000000000..86f106343 --- /dev/null +++ b/tests/nodes/subobjects/test_algorithm.py @@ -0,0 +1,53 @@ +import json +import uuid + +from integration_test_helper import integrate_nodes_helper +from util import strip_uid_from_dict + +import cript + + +def test_setter_getter(simple_algorithm_node, complex_citation_node): + a = simple_algorithm_node + a.key = "berendsen" + assert a.key == "berendsen" + a.type = "integration" + assert a.type == "integration" + a.citation += [complex_citation_node] + assert strip_uid_from_dict(json.loads(a.citation[0].json)) == strip_uid_from_dict(json.loads(complex_citation_node.json)) + + +def test_json(simple_algorithm_node, simple_algorithm_dict, complex_citation_node): + a = simple_algorithm_node + a_dict = json.loads(a.json) + assert strip_uid_from_dict(a_dict) == simple_algorithm_dict + print(a.get_json(indent=2).json) + a2 = cript.load_nodes_from_json(a.json) + assert strip_uid_from_dict(json.loads(a2.json)) == strip_uid_from_dict(a_dict) + + +def test_integration_algorithm(cript_api, simple_project_node, simple_collection_node, simple_experiment_node, simple_computation_node, simple_software_configuration, simple_algorithm_node): + """ + integration test between Python SDK and API Client + + 1. POST to API + 1. GET from API + 1. 
assert JSON sent and JSON received are the same + """ + # ========= test create ========= + simple_project_node.name = f"test_integration_algorithm_{uuid.uuid4().hex}" + + simple_project_node.collection = [simple_collection_node] + + simple_project_node.collection[0].experiment = [simple_experiment_node] + simple_project_node.collection[0].experiment[0].computation = [simple_computation_node] + simple_project_node.collection[0].experiment[0].computation[0].software_configuration = [simple_software_configuration] + simple_project_node.collection[0].experiment[0].computation[0].software_configuration[0].algorithm = [simple_algorithm_node] + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) + + # ========= test update ========= + # change a simple attribute to trigger update + simple_project_node.collection[0].experiment[0].computation[0].software_configuration[0].algorithm[0].type = "integration" + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) diff --git a/tests/nodes/subobjects/test_citation.py b/tests/nodes/subobjects/test_citation.py new file mode 100644 index 000000000..5d02c9735 --- /dev/null +++ b/tests/nodes/subobjects/test_citation.py @@ -0,0 +1,50 @@ +import json +import uuid + +from integration_test_helper import integrate_nodes_helper +from util import strip_uid_from_dict + +import cript + + +def test_json(complex_citation_node, complex_citation_dict): + c = complex_citation_node + c_dict = strip_uid_from_dict(json.loads(c.json)) + assert c_dict == complex_citation_dict + c2 = cript.load_nodes_from_json(c.json) + c2_dict = strip_uid_from_dict(json.loads(c2.json)) + assert c_dict == c2_dict + + +def test_setter_getter(complex_citation_node, complex_reference_node): + c = complex_citation_node + c.type = "replicated" + assert c.type == "replicated" + new_ref = complex_reference_node + new_ref.title = "foo bar" + c.reference = new_ref + assert c.reference == new_ref + + +def test_integration_citation(cript_api, simple_project_node, simple_collection_node, complex_citation_node): + """ + integration test between Python SDK and API Client + + 1. POST to API + 1. GET from API + 1.
assert JSON sent and JSON received are the same + """ + # ========= test create ========= + simple_project_node.name = f"test_integration_citation_{uuid.uuid4().hex}" + + simple_project_node.collection = [simple_collection_node] + + simple_project_node.collection[0].citation = [complex_citation_node] + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) + + # ========= test update ========= + # change simple attribute to trigger update + simple_project_node.collection[0].citation[0].type = "extracted_by_human" + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) diff --git a/tests/nodes/subobjects/test_computational_forcefiled.py b/tests/nodes/subobjects/test_computational_forcefiled.py new file mode 100644 index 000000000..f3f9b9eee --- /dev/null +++ b/tests/nodes/subobjects/test_computational_forcefiled.py @@ -0,0 +1,68 @@ +import copy +import json +import uuid + +from integration_test_helper import integrate_nodes_helper +from util import strip_uid_from_dict + +import cript + + +def test_computational_forcefield(complex_computational_forcefield_node, complex_computational_forcefield_dict): + cf = complex_computational_forcefield_node + cf_dict = strip_uid_from_dict(json.loads(cf.json)) + assert cf_dict == strip_uid_from_dict(complex_computational_forcefield_dict) + cf2 = cript.load_nodes_from_json(cf.json) + assert strip_uid_from_dict(json.loads(cf.json)) == strip_uid_from_dict(json.loads(cf2.json)) + + +def test_setter_getter(complex_computational_forcefield_node, complex_citation_node, simple_data_node): + cf2 = complex_computational_forcefield_node + cf2.key = "opls_ua" + assert cf2.key == "opls_ua" + + cf2.building_block = "united_atoms" + assert cf2.building_block == "united_atoms" + + cf2.implicit_solvent = "" + assert cf2.implicit_solvent == "" + + cf2.source = "Iterative Boltzmann inversion" + assert cf2.source == "Iterative Boltzmann inversion" + + cf2.description = "generic polymer model" + assert cf2.description == "generic polymer model" + + data = simple_data_node + cf2.data += [data] + assert cf2.data[-1] is data + + assert len(cf2.citation) == 1 + citation2 = copy.deepcopy(complex_citation_node) + cf2.citation += [citation2] + assert cf2.citation[1] == citation2 + + +def test_integration_computational_forcefield(cript_api, simple_project_node, simple_material_node, simple_computational_forcefield_node): + """ + integration test between Python SDK and API Client + + 1. POST to API + 1. GET from API + 1. 
assert JSON sent and JSON received are the same + """ + # ========= test create ========= + simple_project_node.name = f"test_integration_computational_forcefield_{uuid.uuid4().hex}" + + # renaming to avoid API duplicate node error + simple_material_node.name = f"{simple_material_node.name}_{uuid.uuid4().hex}" + + simple_project_node.material = [simple_material_node] + simple_project_node.material[0].computational_forcefield = simple_computational_forcefield_node + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) + + # ========= test update ========= + # change simple attribute to trigger update + simple_project_node.material[0].computational_forcefield.description = "material computational_forcefield description UPDATED" + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) diff --git a/tests/nodes/subobjects/test_condition.py b/tests/nodes/subobjects/test_condition.py new file mode 100644 index 000000000..4881d9563 --- /dev/null +++ b/tests/nodes/subobjects/test_condition.py @@ -0,0 +1,71 @@ +import json +import uuid + +from integration_test_helper import integrate_nodes_helper +from util import strip_uid_from_dict + + +def test_json(complex_condition_node, complex_condition_dict): + c = complex_condition_node + c_dict = json.loads(c.get_json(condense_to_uuid={}).json) + assert strip_uid_from_dict(c_dict) == strip_uid_from_dict(complex_condition_dict) + ## TODO address deserialization of uid and uuid nodes + # c_deepcopy = copy.deepcopy(c) + # c2 = cript.load_nodes_from_json(c_deepcopy.get_json(condense_to_uuid={}).json) + # assert strip_uid_from_dict(json.loads(c2.get_json(condense_to_uuid={}).json)) == strip_uid_from_dict(json.loads(c.get_json(condense_to_uuid={}).json)) + + +def test_setter_getters(complex_condition_node, complex_data_node): + c2 = complex_condition_node + c2.key = "pressure" + assert c2.key == "pressure" + c2.type = "avg" + assert c2.type == "avg" + + c2.set_value(1, "bar") + assert c2.value == 1 + assert c2.unit == "bar" + + c2.descriptor = "ambient pressure" + assert c2.descriptor == "ambient pressure" + + c2.set_uncertainty(0.1, "stdev") + assert c2.uncertainty == 0.1 + assert c2.uncertainty_type == "stdev" + + c2.set_id = None + assert c2.set_id is None + c2.measurement_id = None + assert c2.measurement_id is None + + c2.data = [complex_data_node] + assert c2.data[0] is complex_data_node + + +def test_integration_process_condition(cript_api, simple_project_node, simple_collection_node, simple_experiment_node, simple_computation_node, simple_condition_node): + """ + integration test between Python SDK and API Client + + 1. POST to API + 1. GET from API + 1. 
assert they're both equal + """ + # ========= test create ========= + # renamed project node to avoid duplicate project node API error + simple_project_node.name = f"{simple_project_node.name}_{uuid.uuid4().hex}" + + simple_project_node.collection = [simple_collection_node] + + simple_project_node.collection[0].experiment = [simple_experiment_node] + + simple_project_node.collection[0].experiment[0].computation = [simple_computation_node] + + simple_project_node.collection[0].experiment[0].computation[0].condition = [simple_condition_node] + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) + + # ========= test update ========= + # change simple attribute to trigger update + simple_project_node.collection[0].experiment[0].computation[0].condition[0].descriptor = "condition descriptor UPDATED" + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) diff --git a/tests/nodes/subobjects/test_equipment.py b/tests/nodes/subobjects/test_equipment.py new file mode 100644 index 000000000..c0cf45356 --- /dev/null +++ b/tests/nodes/subobjects/test_equipment.py @@ -0,0 +1,62 @@ +import copy +import json +import uuid + +from integration_test_helper import integrate_nodes_helper +from util import strip_uid_from_dict + + +def test_json(complex_equipment_node, complex_equipment_dict): + e = complex_equipment_node + e_dict = strip_uid_from_dict(json.loads(e.get_json(condense_to_uuid={}).json)) + assert strip_uid_from_dict(e_dict) == strip_uid_from_dict(complex_equipment_dict) + e2 = copy.deepcopy(e) + + assert strip_uid_from_dict(json.loads(e.get_json(condense_to_uuid={}).json)) == strip_uid_from_dict(json.loads(e2.get_json(condense_to_uuid={}).json)) + + +def test_setter_getter(complex_equipment_node, complex_condition_node, complex_file_node, complex_citation_node): + e2 = complex_equipment_node + e2.key = "glass_beaker" + assert e2.key == "glass_beaker" + e2.description = "Fancy glassware" + assert e2.description == "Fancy glassware" + + assert len(e2.condition) == 1 + c2 = complex_condition_node + e2.condition += [c2] + assert e2.condition[1] == c2 + + assert len(e2.file) == 0 + e2.file += [complex_file_node] + assert e2.file[-1] is complex_file_node + + cit2 = copy.deepcopy(complex_citation_node) + assert len(e2.citation) == 1 + e2.citation += [cit2] + assert e2.citation[1] == cit2 + + +def test_integration_equipment(cript_api, simple_project_node, simple_collection_node, simple_experiment_node, simple_process_node, simple_equipment_node): + """ + integration test between Python SDK and API Client + + 1. POST to API + 1. GET from API + 1. 
assert JSON sent and JSON received are the same + """ + # ========= test create ========= + simple_project_node.name = f"test_integration_equipment_{uuid.uuid4().hex}" + + simple_project_node.collection = [simple_collection_node] + simple_project_node.collection[0].experiment = [simple_experiment_node] + simple_project_node.collection[0].experiment[0].process = [simple_process_node] + simple_project_node.collection[0].experiment[0].process[0].equipment = [simple_equipment_node] + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) + + # ========= test update ========= + # change simple attribute to trigger update + simple_project_node.collection[0].experiment[0].process[0].equipment[0].description = "equipment description UPDATED" + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) diff --git a/tests/nodes/subobjects/test_ingredient.py b/tests/nodes/subobjects/test_ingredient.py new file mode 100644 index 000000000..ce18b97e4 --- /dev/null +++ b/tests/nodes/subobjects/test_ingredient.py @@ -0,0 +1,68 @@ +import json +import uuid + +from integration_test_helper import integrate_nodes_helper +from util import strip_uid_from_dict + +import cript + + +def test_json(complex_ingredient_node, complex_ingredient_dict): + i = complex_ingredient_node + i_dict = json.loads(i.json) + i_dict["material"] = {} + j_dict = strip_uid_from_dict(complex_ingredient_dict) + j_dict["material"] = {} + assert strip_uid_from_dict(i_dict) == j_dict + i2 = cript.load_nodes_from_json(i.get_json(condense_to_uuid={}).json) + ref_dict = strip_uid_from_dict(json.loads(i.get_json(condense_to_uuid={}).json)) + ref_dict["material"] = {} + ref_dictB = strip_uid_from_dict(json.loads(i2.get_json(condense_to_uuid={}).json)) + ref_dictB["material"] = {} + assert ref_dict == ref_dictB + + +def test_getter_setter(complex_ingredient_node, complex_quantity_node, simple_material_node): + i2 = complex_ingredient_node + q2 = complex_quantity_node + i2.set_material(simple_material_node, [complex_quantity_node]) + assert i2.material is simple_material_node + assert i2.quantity[-1] is q2 + + i2.keyword = ["monomer"] + assert i2.keyword == ["monomer"] + + +def test_integration_ingredient(cript_api, simple_project_node, simple_collection_node, simple_experiment_node, simple_process_node, simple_ingredient_node, simple_material_node): + """ + integration test between Python SDK and API Client + + 1. POST to API + Project with material + Material has ingredient sub-object + 1. GET JSON from API + 1. 
check their fields equal + + Notes + ---- + since `ingredient` requires a `quantity` this test also indirectly tests `quantity` + """ + # ========= test create ========= + simple_project_node.name = f"test_integration_ingredient_{uuid.uuid4().hex}" + + # assemble needed nodes + simple_project_node.collection = [simple_collection_node] + simple_project_node.collection[0].experiment = [simple_experiment_node] + simple_project_node.collection[0].experiment[0].process = [simple_process_node] + simple_project_node.collection[0].experiment[0].process[0].ingredient = [simple_ingredient_node] + + # add orphaned material node to project + simple_project_node.material = [simple_material_node] + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) + + # ========= test update ========= + # change simple attribute to trigger update + simple_project_node.collection[0].experiment[0].process[0].ingredient[0].keyword = ["polymer"] + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) diff --git a/tests/nodes/subobjects/test_parameter.py b/tests/nodes/subobjects/test_parameter.py new file mode 100644 index 000000000..0ef433653 --- /dev/null +++ b/tests/nodes/subobjects/test_parameter.py @@ -0,0 +1,53 @@ +import json +import uuid + +from integration_test_helper import integrate_nodes_helper +from util import strip_uid_from_dict + +import cript + + +def test_parameter_setter_getter(complex_parameter_node): + p = complex_parameter_node + p.key = "damping_time" + assert p.key == "damping_time" + p.value = 15.0 + assert p.value == 15.0 + p.unit = "m" + assert p.unit == "m" + + +def test_parameter_json_serialization(complex_parameter_node, complex_parameter_dict): + p = complex_parameter_node + p_str = p.json + p2 = cript.load_nodes_from_json(p_str) + p_dict = json.loads(p2.json) + assert strip_uid_from_dict(p_dict) == complex_parameter_dict + assert p2.json == p.json + + +def test_integration_parameter(cript_api, simple_project_node, simple_collection_node, simple_experiment_node, simple_computation_node, simple_software_configuration, simple_algorithm_node, complex_parameter_node): + """ + integration test between Python SDK and API Client + + 1. POST to API + 1. GET from API + 1. 
assert JSON sent and JSON received are the same + """ + # ========= test create ========= + simple_project_node.name = f"test_integration_parameter_{uuid.uuid4().hex}" + + simple_project_node.collection = [simple_collection_node] + simple_project_node.collection[0].experiment = [simple_experiment_node] + simple_project_node.collection[0].experiment[0].computation = [simple_computation_node] + simple_project_node.collection[0].experiment[0].computation[0].software_configuration = [simple_software_configuration] + simple_project_node.collection[0].experiment[0].computation[0].software_configuration[0].algorithm = [simple_algorithm_node] + simple_project_node.collection[0].experiment[0].computation[0].software_configuration[0].algorithm[0].parameter = [complex_parameter_node] + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) + + # ========= test update ========= + # update simple attribute to trigger update + simple_project_node.collection[0].experiment[0].computation[0].software_configuration[0].algorithm[0].parameter[0].value = 123456789 + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) diff --git a/tests/nodes/subobjects/test_property.py b/tests/nodes/subobjects/test_property.py new file mode 100644 index 000000000..63a01030b --- /dev/null +++ b/tests/nodes/subobjects/test_property.py @@ -0,0 +1,85 @@ +import copy +import json +import uuid + +from integration_test_helper import integrate_nodes_helper +from util import strip_uid_from_dict + +import cript + + +def test_json(complex_property_node, complex_property_dict): + p = complex_property_node + p_dict = strip_uid_from_dict(json.loads(p.get_json(condense_to_uuid={}).json)) + assert p_dict == complex_property_dict + p2 = cript.load_nodes_from_json(p.get_json(condense_to_uuid={}).json) + + assert strip_uid_from_dict(json.loads(p2.get_json(condense_to_uuid={}).json)) == strip_uid_from_dict(json.loads(p.get_json(condense_to_uuid={}).json)) + + +def test_setter_getter(complex_property_node, simple_material_node, simple_process_node, complex_condition_node, simple_data_node, simple_computation_node, complex_citation_node): + p2 = complex_property_node + p2.key = "modulus_loss" + assert p2.key == "modulus_loss" + p2.type = "min" + assert p2.type == "min" + p2.set_value(600.1, "MPa") + assert p2.value == 600.1 + assert p2.unit == "MPa" + + p2.set_uncertainty(10.5, "stdev") + assert p2.uncertainty == 10.5 + assert p2.uncertainty_type == "stdev" + + p2.component += [simple_material_node] + assert p2.component[-1] is simple_material_node + p2.structure = "structure2" + assert p2.structure == "structure2" + + p2.method = "scale" + assert p2.method == "scale" + + p2.sample_preparation = simple_process_node + assert p2.sample_preparation is simple_process_node + assert len(p2.condition) == 1 + p2.condition += [complex_condition_node] + assert len(p2.condition) == 2 + p2.data = [simple_data_node] + assert p2.data[0] is simple_data_node + + p2.computation += [simple_computation_node] + assert p2.computation[-1] is simple_computation_node + + assert len(p2.citation) == 1 + cit2 = copy.deepcopy(complex_citation_node) + p2.citation += [cit2] + assert len(p2.citation) == 2 + assert p2.citation[-1] == cit2 + p2.notes = "notes2" + assert p2.notes == "notes2" + + +def test_integration_material_property(cript_api, simple_project_node, simple_material_node, simple_property_node): + """ + integration test between Python SDK and API Client + + 1. 
POST to API + Project with material + Material has property sub-object + 1. GET JSON from API + 1. check their fields equal + """ + # ========= test create ========= + # rename property and material to avoid duplicate node API error + simple_project_node.name = f"test_integration_material_property_{uuid.uuid4().hex}" + simple_material_node.name = f"{simple_material_node.name}_{uuid.uuid4().hex}" + + simple_project_node.material = [simple_material_node] + simple_project_node.material[0].property = [simple_property_node] + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) + + # ========= test update ========= + # change simple attribute to trigger update + simple_project_node.material[0].property[0].notes = "property sub-object notes UPDATED" + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) diff --git a/tests/nodes/subobjects/test_quantity.py b/tests/nodes/subobjects/test_quantity.py new file mode 100644 index 000000000..87542fbc9 --- /dev/null +++ b/tests/nodes/subobjects/test_quantity.py @@ -0,0 +1,58 @@ +import json +import uuid + +from integration_test_helper import integrate_nodes_helper +from util import strip_uid_from_dict + +import cript + + +def test_json(complex_quantity_node, complex_quantity_dict): + q = complex_quantity_node + q_dict = json.loads(q.json) + assert strip_uid_from_dict(q_dict) == complex_quantity_dict + q2 = cript.load_nodes_from_json(q.json) + assert q2.json == q.json + + +def test_getter_setter(complex_quantity_node): + q = complex_quantity_node + q.value = 0.5 + assert q.value == 0.5 + q.set_uncertainty(0.1, "stderr") + assert q.uncertainty == 0.1 + assert q.uncertainty_type == "stderr" + + q.set_key_unit("volume", "m**3") + assert q.key == "volume" + assert q.unit == "m**3" + + +def test_integration_quantity(cript_api, simple_project_node, simple_collection_node, simple_experiment_node, simple_process_node, simple_ingredient_node, simple_material_node): + """ + integration test between Python SDK and API Client + + 1. POST to API + Project with material + Material has ingredient sub-object + 1. GET JSON from API + 1. 
check their fields equal + """ + # ========= test create ========= + simple_project_node.name = f"test_integration_quantity_{uuid.uuid4().hex}" + + # assemble needed nodes + simple_project_node.collection = [simple_collection_node] + simple_project_node.collection[0].experiment = [simple_experiment_node] + simple_project_node.collection[0].experiment[0].process = [simple_process_node] + simple_project_node.collection[0].experiment[0].process[0].ingredient = [simple_ingredient_node] + + # add orphaned material node to project + simple_project_node.material = [simple_material_node] + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) + + # ========= test update ========= + # change simple attribute to trigger update + simple_project_node.collection[0].experiment[0].process[0].ingredient[0].quantity[0].value = 123456789 + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) diff --git a/tests/nodes/subobjects/test_software.py b/tests/nodes/subobjects/test_software.py new file mode 100644 index 000000000..ba557ad48 --- /dev/null +++ b/tests/nodes/subobjects/test_software.py @@ -0,0 +1,68 @@ +import copy +import json +import uuid + +from integration_test_helper import integrate_nodes_helper +from util import strip_uid_from_dict + +import cript + + +def test_json(complex_software_node, complex_software_dict): + s = complex_software_node + s_dict = strip_uid_from_dict(json.loads(s.json)) + assert s_dict == complex_software_dict + s2 = cript.load_nodes_from_json(s.json) + assert s2.json == s.json + + +def test_setter_getter(complex_software_node): + s2 = complex_software_node + s2.name = "PySAGES" + assert s2.name == "PySAGES" + s2.version = "v0.3.0" + assert s2.version == "v0.3.0" + s2.source = "https://github.com/SSAGESLabs/PySAGES" + assert s2.source == "https://github.com/SSAGESLabs/PySAGES" + + +def test_uuid(complex_software_node): + s = complex_software_node + + # Deep copies should not share uuid (or uids) or urls + s2 = copy.deepcopy(complex_software_node) + assert s.uuid != s2.uuid + assert s.uid != s2.uid + assert s.url != s2.url + + # Loads from json have the same uuid and url + s3 = cript.load_nodes_from_json(s.json) + assert s3.uuid == s.uuid + assert s3.url == s.url + + +def test_integration_software(cript_api, simple_project_node, simple_computation_node, simple_software_configuration, complex_software_node): + """ + integration test between Python SDK and API Client + + 1. POST to API + 1. GET from API + 1. 
assert they're both equal + + Notes + ----- + indirectly tests the software_configuration sub-object, since the software node is attached to the computation through it + """ + # ========= test create ========= + simple_project_node.name = f"test_integration_software_name_{uuid.uuid4().hex}" + + simple_project_node.collection[0].experiment[0].computation = [simple_computation_node] + simple_project_node.collection[0].experiment[0].computation[0].software_configuration = [simple_software_configuration] + simple_project_node.collection[0].experiment[0].computation[0].software_configuration[0].software = complex_software_node + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) + + # ========= test update ========= + # change simple attribute to trigger update + simple_project_node.collection[0].experiment[0].computation[0].software_configuration[0].software.version = "software version UPDATED" + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) diff --git a/tests/nodes/subobjects/test_software_configuration.py b/tests/nodes/subobjects/test_software_configuration.py new file mode 100644 index 000000000..eddd7388e --- /dev/null +++ b/tests/nodes/subobjects/test_software_configuration.py @@ -0,0 +1,63 @@ +import json +import uuid + +from integration_test_helper import integrate_nodes_helper +from util import strip_uid_from_dict + +import cript + + +def test_json(complex_software_configuration_node, complex_software_configuration_dict): + sc = complex_software_configuration_node + sc_dict = strip_uid_from_dict(json.loads(sc.json)) + assert sc_dict == complex_software_configuration_dict + sc2 = cript.load_nodes_from_json(sc.json) + + assert strip_uid_from_dict(json.loads(sc2.json)) == strip_uid_from_dict(json.loads(sc.json)) + + +def test_setter_getter(complex_software_configuration_node, simple_algorithm_node, complex_citation_node): + sc2 = complex_software_configuration_node + software2 = sc2.software + sc2.software = software2 + assert sc2.software is software2 + + # assert len(sc2.algorithm) == 1 + # al2 = simple_algorithm_node + # print(sc2.get_json(indent=2,sortkeys=False).json) + # print(al2.get_json(indent=2,sortkeys=False).json) + # sc2.algorithm += [al2] + # assert sc2.algorithm[1] is al2 + + sc2.notes = "my new fancy notes" + assert sc2.notes == "my new fancy notes" + + # cit2 = complex_citation_node + # assert len(sc2.citation) == 1 + # sc2.citation += [cit2] + # assert sc2.citation[1] == cit2 + + +def test_integration_software_configuration(cript_api, simple_project_node, simple_collection_node, simple_experiment_node, simple_computation_node, simple_software_configuration): + """ + integration test between Python SDK and API Client + + 1. POST to API + 1. GET from API + 1.
assert JSON sent and JSON received are the same + """ + # ========= test create ========= + simple_project_node.name = f"test_integration_software_configuration_{uuid.uuid4().hex}" + + simple_project_node.collection = [simple_collection_node] + simple_project_node.collection[0].experiment = [simple_experiment_node] + simple_project_node.collection[0].experiment[0].computation = [simple_computation_node] + simple_project_node.collection[0].experiment[0].computation[0].software_configuration = [simple_software_configuration] + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) + + # ========= test update ========= + # change simple attribute to trigger update + simple_project_node.collection[0].experiment[0].computation[0].software_configuration[0].notes = "software configuration integration test UPDATED" + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) diff --git a/tests/nodes/supporting_nodes/test_file.py b/tests/nodes/supporting_nodes/test_file.py new file mode 100644 index 000000000..aaad885aa --- /dev/null +++ b/tests/nodes/supporting_nodes/test_file.py @@ -0,0 +1,220 @@ +import copy +import json +import os +import uuid + +import pytest +from integration_test_helper import integrate_nodes_helper +from util import strip_uid_from_dict + +import cript + + +def test_create_file() -> None: + """ + tests that a simple file with only required attributes can be created + """ + file_node = cript.File(name="my file name", source="https://google.com", type="calibration") + + assert isinstance(file_node, cript.File) + + +def test_source_is_local(tmp_path, tmp_path_factory) -> None: + """ + tests that the `_is_local_file()` function is working well + and it can correctly tell the difference between local file, URL, cloud storage object_name correctly + + ## test cases + ### web sources + * AWS S3 cloud storage object_name + * web URL file source + example: `https://my-website/my-file-name.pdf` + ## local file sources + * local file path + * absolute file path + * relative file path + """ + from cript.nodes.supporting_nodes.file import _is_local_file + + # URL + assert _is_local_file(file_source="https://my-website/my-uploaded-file.pdf") is False + + # S3 object_name + assert _is_local_file(file_source="s3_directory/s3_uploaded_file.txt") is False + + # create temporary file + temp_file = tmp_path_factory.mktemp("test_source_is_local") / "temp_file.txt" + temp_file.write_text("hello world") # write something to the file to force creation + + # Absolute file path + absolute_file_path: str = str(temp_file.resolve()) + assert _is_local_file(file_source=absolute_file_path) is True + + # Relative file path from cwd + # get relative file path to temp_file from cwd + relative_file_path: str = os.path.relpath(absolute_file_path, os.getcwd()) + assert _is_local_file(file_source=relative_file_path) is True + + +@pytest.mark.skip(reason="test is outdated because files now upload on api.save()") +def test_local_file_source_upload_and_download(tmp_path_factory) -> None: + """ + upload a file and download it and be sure the contents are the same + + 1. create a temporary file and get its file path + 1. create a unique string + 1. write unique string to temporary file + 1. create a file node with the source being the temporary file + 1. the file should then be automatically uploaded to cloud storage + and the source should be replaced with cloud storage source beginning with `https://` + 1. download the file to a temporary path + 1. 
read that file text and assert that the string written and read are the same + """ + import datetime + import uuid + + file_text: str = ( + f"This is an automated test from the Python SDK within " + f"`tests/nodes/supporting_nodes/test_file.py/test_local_file_source_upload_and_download()` " + f"checking that the file source is automatically and correctly uploaded to AWS S3. " + f"The test is conducted on UTC time of '{datetime.datetime.utcnow()}' " + f"with the unique UUID of '{str(uuid.uuid4())}'" + ) + + # create a temp file and write to it + upload_file_dir = tmp_path_factory.mktemp("file_test_upload_file_dir") + local_file_path = upload_file_dir / "my_upload_file.txt" + local_file_path.write_text(file_text) + + # create a file node with a local file path + my_file = cript.File(name="my local file source node", source=str(local_file_path), type="data") + + # check that the file source has been uploaded to cloud storage and source has changed to reflect that + assert my_file.source.startswith("tests/") + + # Get the temporary directory path and clean up handled by pytest + download_file_dir = tmp_path_factory.mktemp("file_test_download_file_dir") + download_file_name = "my_downloaded_file.txt" + + # download file + my_file.download(destination_directory_path=download_file_dir, file_name=download_file_name) + + # the path the file was downloaded to and can be read from + downloaded_local_file_path = download_file_dir / download_file_name + + # read file contents from where the file was downloaded + downloaded_file_contents = downloaded_local_file_path.read_text() + + # assert file contents for upload and download are the same + assert downloaded_file_contents == file_text + + +def test_create_file_with_local_source(tmp_path) -> None: + """ + tests that a simple file with only required attributes can be created + with source pointing to a local file on storage + + create a temporary directory with temporary file + """ + # create a temporary file in the temporary directory to test with + file_path = tmp_path / "test.txt" + with open(file_path, "w") as temporary_file: + temporary_file.write("hello world!") + + assert cript.File(name="my file node with local source", source=str(file_path), type="calibration") + + +@pytest.mark.skip(reason="validating file type automatically with DB schema and test not currently needed") +def test_file_type_invalid_vocabulary() -> None: + """ + tests that setting the file type to an invalid vocabulary word gives the expected error + """ + pass + + +def test_file_getters_and_setters(complex_file_node) -> None: + """ + tests that all the getters and setters are working fine + + Notes + ----- + indirectly tests setting the file type to correct vocabulary + """ + # ------- new properties ------- + new_source = "https://bing.com" + new_file_type = "computation_config" + new_file_extension = ".csv" + new_data_dictionary = "new data dictionary" + + # ------- set properties ------- + complex_file_node.source = new_source + complex_file_node.type = new_file_type + complex_file_node.extension = new_file_extension + complex_file_node.data_dictionary = new_data_dictionary + + # ------- assert set and get properties are the same ------- + assert complex_file_node.source == new_source + assert complex_file_node.type == new_file_type + assert complex_file_node.extension == new_file_extension + assert complex_file_node.data_dictionary == new_data_dictionary + + +def test_serialize_file_to_json(complex_file_node) -> None: + """ + tests that it can correctly turn the file node 
into its equivalent JSON + """ + + expected_file_node_dict = { + "node": ["File"], + "name": "my complex file node fixture", + "source": "https://criptapp.org", + "type": "calibration", + "extension": ".csv", + "data_dictionary": "my file's data dictionary", + } + + # compare dicts for more accurate comparison + assert strip_uid_from_dict(json.loads(complex_file_node.json)) == expected_file_node_dict + + +def test_uuid(complex_file_node): + file_node = complex_file_node + + # Deep copies should not share uuid (or uids) or urls + file_node2 = copy.deepcopy(complex_file_node) + assert file_node.uuid != file_node2.uuid + assert file_node.uid != file_node2.uid + assert file_node.url != file_node2.url + + # Loads from json have the same uuid and url + file_node3 = cript.load_nodes_from_json(file_node.json) + assert file_node3.uuid == file_node.uuid + assert file_node3.url == file_node.url + + +def test_integration_file(cript_api, simple_project_node, simple_data_node): + """ + integration test between Python SDK and API Client + + 1. POST to API + 1. GET from API + 1. assert they're both equal + + Notes + ----- + indirectly tests data node as well because every file node must be in a data node + """ + # ========= test create ========= + simple_project_node.name = f"test_integration_file_{uuid.uuid4().hex}" + + simple_project_node.collection[0].experiment[0].data = [simple_data_node] + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) + + # ========= test update ========= + # change simple attribute to trigger update + simple_project_node.collection[0].experiment[0].data[0].file[0].notes = "file notes UPDATED" + # TODO enable later + # simple_project_node.collection[0].experiment[0].data[0].file[0].data_dictionary = "file data_dictionary UPDATED" + + integrate_nodes_helper(cript_api=cript_api, project_node=simple_project_node) diff --git a/tests/nodes/supporting_nodes/test_user.py b/tests/nodes/supporting_nodes/test_user.py new file mode 100644 index 000000000..ef2ee7ef9 --- /dev/null +++ b/tests/nodes/supporting_nodes/test_user.py @@ -0,0 +1,55 @@ +import json + +import pytest +from util import strip_uid_from_dict + +import cript + + +def test_user_serialization_and_deserialization(complex_user_dict, complex_user_node): + """ + tests just to see if a user node can be correctly deserialized from json + and serialized to json + + Notes + ----- + * since a User node cannot be instantiated + * a User node is created from JSON + * then the user node attributes are compared to what they are expected + * to check that the user node is created correctly + """ + + user_node_dict = complex_user_dict + user_node = complex_user_node + assert user_node_dict == strip_uid_from_dict(json.loads(user_node.json)) + + # deserialize node from JSON + user_node = cript.load_nodes_from_json(nodes_json=user_node.json) + + # checks that the user node has been created correctly by checking the properties + assert user_node.username == user_node_dict["username"] + assert user_node.email == user_node_dict["email"] + assert user_node.orcid == user_node_dict["orcid"] + + # check serialize node to JSON is working correctly + # convert dicts for better comparison + assert strip_uid_from_dict(json.loads(user_node.json)) == user_node_dict + + +def test_set_user_properties(complex_user_node): + """ + tests that setting any user property throws an AttributeError + """ + with pytest.raises(AttributeError): + complex_user_node.username = "my new username" + + with pytest.raises(AttributeError): + 
complex_user_node.email = "my new email" + + with pytest.raises(AttributeError): + complex_user_node.orcid = "my new orcid" + + with pytest.raises(AttributeError): + # TODO try setting it via a group node + # either way it should give the same error + complex_user_node.orcid = ["my new group"] diff --git a/tests/nodes/test_utils.py b/tests/nodes/test_utils.py new file mode 100644 index 000000000..14826bafc --- /dev/null +++ b/tests/nodes/test_utils.py @@ -0,0 +1,18 @@ +from cript.nodes.util import _is_node_field_valid + + +def test_is_node_field_valid() -> None: + """ + test the `_is_node_field_valid()` function to be sure it does the node type check correctly + + checks both in places it should be valid and invalid + """ + assert _is_node_field_valid(node_type_list=["Project"]) is True + + assert _is_node_field_valid(node_type_list=["Project", "Material"]) is False + + assert _is_node_field_valid(node_type_list=[""]) is False + + assert _is_node_field_valid(node_type_list="Project") is False + + assert _is_node_field_valid(node_type_list=[]) is False diff --git a/tests/test_node_util.py b/tests/test_node_util.py new file mode 100644 index 000000000..38c9a0c60 --- /dev/null +++ b/tests/test_node_util.py @@ -0,0 +1,325 @@ +import copy +import json +from dataclasses import replace + +import pytest +from util import strip_uid_from_dict + +import cript +from cript.nodes.core import get_new_uid +from cript.nodes.exceptions import ( + CRIPTJsonNodeError, + CRIPTJsonSerializationError, + CRIPTNodeSchemaError, + CRIPTOrphanedComputationalProcessError, + CRIPTOrphanedComputationError, + CRIPTOrphanedDataError, + CRIPTOrphanedMaterialError, + CRIPTOrphanedProcessError, +) + + +def test_removing_nodes(simple_algorithm_node, complex_parameter_node, simple_algorithm_dict): + a = simple_algorithm_node + p = complex_parameter_node + a.parameter += [p] + assert strip_uid_from_dict(json.loads(a.json)) != simple_algorithm_dict + a.remove_child(p) + assert strip_uid_from_dict(json.loads(a.json)) == simple_algorithm_dict + + +def test_uid_deserialization(simple_algorithm_node, complex_parameter_node, simple_algorithm_dict): + identifiers = [{"bigsmiles": "123456"}] + material = cript.Material(name="my material", identifiers=identifiers) + + computation = cript.Computation(name="my computation name", type="analysis") + property1 = cript.Property("modulus_shear", "value", 5.0, "GPa", computation=[computation]) + property2 = cript.Property("modulus_loss", "value", 5.0, "GPa", computation=[computation]) + material.property = [property1, property2] + + material2 = cript.load_nodes_from_json(material.json) + assert json.loads(material.json) == json.loads(material2.json) + + material3_dict = { + "node": ["Material"], + "uid": "_:f6d56fdc-9df7-49a1-a843-cf92681932ad", + "uuid": "f6d56fdc-9df7-49a1-a843-cf92681932ad", + "name": "my material", + "property": [ + { + "node": ["Property"], + "uid": "_:82e7270e-9f35-4b35-80a2-faa6e7f670be", + "uuid": "82e7270e-9f35-4b35-80a2-faa6e7f670be", + "key": "modulus_shear", + "type": "value", + "value": 5.0, + "unit": "GPa", + "computation": [{"uid": "_:9ddda2c0-ff8c-4ce3-beb0-e0cafb6169ef"}], + }, + { + "node": ["Property"], + "uid": "_:fc4dfa5e-742c-4d0b-bb66-2185461f4582", + "uuid": "fc4dfa5e-742c-4d0b-bb66-2185461f4582", + "key": "modulus_loss", + "type": "value", + "value": 5.0, + "unit": "GPa", + "computation": [ + { + "uid": "_:9ddda2c0-ff8c-4ce3-beb0-e0cafb6169ef", + } + ], + }, + ], + "bigsmiles": "123456", + } + + with 
pytest.raises(cript.nodes.exceptions.CRIPTDeserializationUIDError): + cript.load_nodes_from_json(json.dumps(material3_dict)) + + # TODO convince beartype to allow _ProxyUID as well + # material4_dict = { + # "node": [ + # "Material" + # ], + # "uid": "_:f6d56fdc-9df7-49a1-a843-cf92681932ad", + # "uuid": "f6d56fdc-9df7-49a1-a843-cf92681932ad", + # "name": "my material", + # "property": [ + # { + # "node": [ + # "Property" + # ], + # "uid": "_:82e7270e-9f35-4b35-80a2-faa6e7f670be", + # "uuid": "82e7270e-9f35-4b35-80a2-faa6e7f670be", + # "key": "modulus_shear", + # "type": "value", + # "value": 5.0, + # "unit": "GPa", + # "computation": [ + # { + # "node": [ + # "Computation" + # ], + # "uid": "_:9ddda2c0-ff8c-4ce3-beb0-e0cafb6169ef" + # } + # ] + # }, + # { + # "node": [ + # "Property" + # ], + # "uid": "_:fc4dfa5e-742c-4d0b-bb66-2185461f4582", + # "uuid": "fc4dfa5e-742c-4d0b-bb66-2185461f4582", + # "key": "modulus_loss", + # "type": "value", + # "value": 5.0, + # "unit": "GPa", + # "computation": [ + # { + # "node": [ + # "Computation" + # ], + # "uid": "_:9ddda2c0-ff8c-4ce3-beb0-e0cafb6169ef", + # "uuid": "9ddda2c0-ff8c-4ce3-beb0-e0cafb6169ef", + # "name": "my computation name", + # "type": "analysis", + # "citation": [] + # } + # ] + # } + # ], + # "bigsmiles": "123456" + # } + + # material4 = cript.load_nodes_from_json(json.dumps(material4_dict)) + # assert json.loads(material.json) == json.loads(material4.json) + + +def test_json_error(complex_parameter_node): + parameter = complex_parameter_node + # Let's break the node by violating the data model + parameter._json_attrs = replace(parameter._json_attrs, value="abc") + with pytest.raises(CRIPTNodeSchemaError): + parameter.validate() + # Let's break it completely + parameter._json_attrs = None + with pytest.raises(CRIPTJsonSerializationError): + parameter.json + + +def test_local_search(simple_algorithm_node, complex_parameter_node): + a = simple_algorithm_node + # Check if we can use search to find the algorithm node, but specifying node and key + find_algorithms = a.find_children({"node": "Algorithm", "key": "mc_barostat"}) + assert find_algorithms == [a] + # Check if it correctly exclude the algorithm if key is specified to non-existent value + find_algorithms = a.find_children({"node": "Algorithm", "key": "mc"}) + assert find_algorithms == [] + + # Adding 2 separate parameters to test deeper search + p1 = complex_parameter_node + p2 = copy.deepcopy(complex_parameter_node) + p2.key = "damping_time" + p2.value = 15.0 + p2.unit = "m" + a.parameter += [p1, p2] + + # Test if we can find a specific one of the parameters + find_parameter = a.find_children({"key": "damping_time"}) + assert find_parameter == [p2] + + # Test to find the other parameter + find_parameter = a.find_children({"key": "update_frequency"}) + assert find_parameter == [p1] + + # Test if correctly find no parameter if we are searching for a non-existent parameter + find_parameter = a.find_children({"key": "update"}) + assert find_parameter == [] + + # Test nested search. Here we are looking for any node that has a child node parameter as specified. 
+ find_algorithms = a.find_children({"parameter": {"key": "damping_time"}}) + assert find_algorithms == [a] + # Same as before, but specifying two children that have to be present (AND condition) + find_algorithms = a.find_children({"parameter": [{"key": "damping_time"}, {"key": "update_frequency"}]}) + assert find_algorithms == [a] + + # Test that the main node is correctly excluded if we specify an additional, non-existent parameter + find_algorithms = a.find_children({"parameter": [{"key": "damping_time"}, {"key": "update_frequency"}, {"foo": "bar"}]}) + assert find_algorithms == [] + + +def test_cycles(complex_data_node, simple_computation_node): + # We create a wrong cycle with parameters here. + # TODO replace this with nodes that actually can form a cycle + d = copy.deepcopy(complex_data_node) + c = copy.deepcopy(simple_computation_node) + d.computation += [c] + # Using input and output data guarantees a cycle here. + c.output_data += [d] + c.input_data += [d] + + # # Test the repetition of a citation. + # # Notice that we do not use a deepcopy here, as we want the citation to be the exact same node. + # citation = d.citation[0] + # # c._json_attrs.citation.append(citation) + # c.citation += [citation] + # # print(c.get_json(indent=2).json) + # # c.validate() + + # Generate json with an implicit cycle + c.json + d.json + + +def test_uid_serial(simple_inventory_node): + simple_inventory_node.material += simple_inventory_node.material + json_dict = json.loads(simple_inventory_node.get_json(condense_to_uuid={}).json) + assert len(json_dict["material"]) == 4 + assert isinstance(json_dict["material"][2]["uid"], str) + assert json_dict["material"][2]["uid"].startswith("_:") + assert len(json_dict["material"][2]["uid"]) == len(get_new_uid()) + assert isinstance(json_dict["material"][3]["uid"], str) + assert json_dict["material"][3]["uid"].startswith("_:") + assert len(json_dict["material"][3]["uid"]) == len(get_new_uid()) + assert json_dict["material"][3]["uid"] != json_dict["material"][2]["uid"] + + +def test_invalid_json_load(): + def raise_node_dict(node_dict): + node_str = json.dumps(node_dict) + with pytest.raises(CRIPTJsonNodeError): + cript.load_nodes_from_json(node_str) + + node_dict = {"node": "Computation"} + raise_node_dict(node_dict) + node_dict = {"node": []} + raise_node_dict(node_dict) + node_dict = {"node": ["asdf", "asdf"]} + raise_node_dict(node_dict) + node_dict = {"node": [None]} + raise_node_dict(node_dict) + + +def test_invalid_project_graphs(simple_project_node, simple_material_node, simple_process_node, simple_property_node, simple_data_node, simple_computation_node, simple_computation_process_node): + project = copy.deepcopy(simple_project_node) + process = copy.deepcopy(simple_process_node) + material = copy.deepcopy(simple_material_node) + + ingredient = cript.Ingredient(material=material, quantity=[cript.Quantity(key="mass", value=1.23, unit="kg")]) + process.ingredient += [ingredient] + + # Add the process to the experiment, but not to the inventory or materials + # Invalid graph + project.collection[0].experiment[0].process += [process] + with pytest.raises(CRIPTOrphanedMaterialError): + project.validate() + + # First fix: add the material to an inventory + project.collection[0].inventory += [cript.Inventory("test_inventory", material=[material])] + project.validate() + # Reverse this fix + project.collection[0].inventory = [] + with pytest.raises(CRIPTOrphanedMaterialError): + project.validate() + + # Fix by adding to the materials list instead.
+ # Using the util helper function for this. + cript.add_orphaned_nodes_to_project(project, active_experiment=None, max_iteration=10) + project.validate() + + # Now add an orphaned process to the graph + process2 = copy.deepcopy(simple_process_node) + process.prerequisite_process += [process2] + with pytest.raises(CRIPTOrphanedProcessError): + project.validate() + + # Wrong fix: pass the helper a dummy experiment that is not part of the project graph + dummy_experiment = copy.deepcopy(project.collection[0].experiment[0]) + with pytest.raises(RuntimeError): + cript.add_orphaned_nodes_to_project(project, dummy_experiment) + # Problem still persists + with pytest.raises(CRIPTOrphanedProcessError): + project.validate() + # Fix by using the helper function correctly + cript.add_orphaned_nodes_to_project(project, project.collection[0].experiment[0], 10) + project.validate() + + # We add a property to the material, because that opens the possibility of orphaned data and computation nodes + property = copy.deepcopy(simple_property_node) + material.property += [property] + project.validate() + # Now add an orphaned data node + data = copy.deepcopy(simple_data_node) + property.data = [data] + with pytest.raises(CRIPTOrphanedDataError): + project.validate() + # Fix with the helper function + cript.add_orphaned_nodes_to_project(project, project.collection[0].experiment[0], 10) + project.validate() + + # Add an orphaned Computation + computation = copy.deepcopy(simple_computation_node) + property.computation += [computation] + with pytest.raises(CRIPTOrphanedComputationError): + project.validate() + # Fix with the helper function + cript.add_orphaned_nodes_to_project(project, project.collection[0].experiment[0], 10) + project.validate() + + # Add an orphaned computational process + comp_proc = copy.deepcopy(simple_computation_process_node) + data.computation_process += [comp_proc] + with pytest.raises(CRIPTOrphanedComputationalProcessError): + while True: + try: # patch orphaned material and process errors so that only the computational process error triggers + project.validate() + except CRIPTOrphanedMaterialError as exc: + project._json_attrs.material.append(exc.orphaned_node) + except CRIPTOrphanedProcessError as exc: + project.collection[0].experiment[0]._json_attrs.process.append(exc.orphaned_node) + else: + break + + cript.add_orphaned_nodes_to_project(project, project.collection[0].experiment[0], 10) + project.validate() diff --git a/tests/util.py b/tests/util.py new file mode 100644 index 000000000..23f056e98 --- /dev/null +++ b/tests/util.py @@ -0,0 +1,21 @@ +import copy + + +def strip_uid_from_dict(node_dict): + """ + Remove "uid" and "uuid" attributes from nested dictionaries. + Helpful for test purposes, since uids are always going to differ.
+ """ + node_dict_copy = copy.deepcopy(node_dict) + for key in node_dict: + if key in ("uid", "uuid"): + del node_dict_copy[key] + if isinstance(node_dict, str): + continue + if isinstance(node_dict[key], dict): + node_dict_copy[key] = strip_uid_from_dict(node_dict[key]) + elif isinstance(node_dict[key], list): + for i, element in enumerate(node_dict[key]): + if isinstance(element, dict): + node_dict_copy[key][i] = strip_uid_from_dict(element) + return node_dict_copy diff --git a/trunk b/trunk new file mode 100755 index 000000000..7c1bf72af --- /dev/null +++ b/trunk @@ -0,0 +1,442 @@ +#!/bin/bash + +############################################################################### +# # +# Setup # +# # +############################################################################### + +set -euo pipefail + +readonly TRUNK_LAUNCHER_VERSION="1.2.5" # warning: this line is auto-updated + +readonly SUCCESS_MARK="\033[0;32m✔\033[0m" +readonly FAIL_MARK="\033[0;31m✘\033[0m" +readonly PROGRESS_MARKS=("⡿" "⢿" "⣻" "⣽" "⣾" "⣷" "⣯" "⣟") + +# This is how mktemp(1) decides where to create stuff in tmpfs. +readonly TMPDIR="${TMPDIR:-/tmp}" + +KERNEL=$(uname | tr "[:upper:]" "[:lower:]") +if [[ ${KERNEL} == mingw64* || ${KERNEL} == msys* ]]; then + KERNEL="mingw" +fi +readonly KERNEL + +MACHINE=$(uname -m) +readonly MACHINE + +PLATFORM="${KERNEL}-${MACHINE}" +readonly PLATFORM + +PLATFORM_UNDERSCORE="${KERNEL}_${MACHINE}" +readonly PLATFORM_UNDERSCORE + +# https://en.wikipedia.org/wiki/ANSI_escape_code#CSI_(Control_Sequence_Introducer)_sequences +# [nF is "cursor previous line" and moves to the beginning of the nth previous line +# [0K is "erase display" and clears from the cursor to the end of the screen +readonly CLEAR_LAST_MSG="\033[1F\033[0K" + +if [[ ! -z ${CI:-} && "${CI}" = true && -z ${TRUNK_LAUNCHER_QUIET:-} ]]; then + TRUNK_LAUNCHER_QUIET=1 +else + TRUNK_LAUNCHER_QUIET=${TRUNK_LAUNCHER_QUIET:-${TRUNK_QUIET:-false}} +fi + +readonly TRUNK_LAUNCHER_DEBUG + +if [[ ${TRUNK_LAUNCHER_QUIET} != false ]]; then + exec 3>&1 4>&2 &>/dev/null +fi + +TRUNK_CACHE="${TRUNK_CACHE:-}" +if [[ -n ${TRUNK_CACHE} ]]; then + : +elif [[ -n ${XDG_CACHE_HOME:-} ]]; then + TRUNK_CACHE="${XDG_CACHE_HOME}/trunk" +else + TRUNK_CACHE="${HOME}/.cache/trunk" +fi +readonly TRUNK_CACHE +readonly CLI_DIR="${TRUNK_CACHE}/cli" +mkdir -p "${CLI_DIR}" + +# platform check +readonly MINIMUM_MACOS_VERSION="10.15" +check_darwin_version() { + local osx_version + osx_version="$(sw_vers -productVersion)" + + # trunk-ignore-begin(shellcheck/SC2312): the == will fail if anything inside the $() fails + if [[ "$(printf "%s\n%s\n" "${MINIMUM_MACOS_VERSION}" "${osx_version}" | + sort --version-sort | + head -n 1)" == "${MINIMUM_MACOS_VERSION}"* ]]; then + return + fi + # trunk-ignore-end(shellcheck/SC2312) + + echo -e "${FAIL_MARK} Trunk requires at least MacOS ${MINIMUM_MACOS_VERSION}" \ + "(yours is ${osx_version}). See https://docs.trunk.io for more info." + exit 1 +} + +if [[ ${PLATFORM} == "darwin-x86_64" || ${PLATFORM} == "darwin-arm64" ]]; then + check_darwin_version +elif [[ ${PLATFORM} == "linux-x86_64" || ${PLATFORM} == "windows-x86_64" || ${PLATFORM} == "mingw-x86_64" ]]; then + : +else + echo -e "${FAIL_MARK} Trunk is only supported on Linux (x64_64), MacOS (x86_64, arm64), and Windows (x86_64)." \ + "See https://docs.trunk.io for more info." + exit 1 +fi + +TRUNK_TMPDIR="${TMPDIR}/trunk-$( + set -e + id -u +)/launcher_logs" +readonly TRUNK_TMPDIR +mkdir -p "${TRUNK_TMPDIR}" + +# For the `mv $TOOL_TMPDIR/trunk $TOOL_DIR` to be atomic (i.e. 
just inode renames), the source and destination filesystems need to be the same +TOOL_TMPDIR=$(mktemp -d "${CLI_DIR}/tmp.XXXXXXXXXX") +readonly TOOL_TMPDIR + +cleanup() { + rm -rf "${TOOL_TMPDIR}" + if [[ $1 == "0" ]]; then + rm -rf "${TRUNK_TMPDIR}" + fi +} +trap 'cleanup $?' EXIT + +# e.g. 2022-02-16-20-40-31-0800 +dt_str() { date +"%Y-%m-%d-%H-%M-%S%z"; } + +LAUNCHER_TMPDIR="${TOOL_TMPDIR}/launcher" +readonly LAUNCHER_TMPDIR +mkdir -p "${LAUNCHER_TMPDIR}" + +if [[ -n ${TRUNK_LAUNCHER_DEBUG:-} ]]; then + set -x +fi + +# launcher awk +# +# BEGIN{ORS="";} +# use "" as the output record separator +# ORS defaults to "\n" for bwk, which results in +# $(printf "foo bar" | awk '{print $2}') == "bar\n" +# +# {gsub(/\r/, "", $0)} +# for every input record (i.e. line), the regex "\r" should be replaced with "" +# This is necessary to handle CRLF files in a portable fashion. +# +# Some StackOverflow answers suggest using RS="\r?\n" to handle CRLF files (RS is the record +# separator, i.e. the line delimiter); unfortunately, original-awk only allows single-character +# values for RS (see https://www.gnu.org/software/gawk/manual/gawk.html#awk-split-records). +lawk() { + awk 'BEGIN{ORS="";}{gsub(/\r/, "", $0)}'"${1}" "${@:2}" +} +awk_test() { + # trunk-ignore-begin(shellcheck/SC2310,shellcheck/SC2312) + # SC2310 and SC2312 are about set -e not propagating to the $(); if that happens, the string + # comparison will fail and we'll claim the user's awk doesn't work + if [[ $( + set -e + printf 'k1: v1\n \tk2: v2\r\n' | lawk '/[ \t]+k2:/{print $2}' + ) == 'v2' && + $( + set -e + printf 'k1: v1\r\n\t k2: v2\r\n' | lawk '/[ \t]+k2:/{print $2}' + ) == 'v2' ]]; then + return + fi + # trunk-ignore-end(shellcheck/SC2310,shellcheck/SC2312) + + echo -e "${FAIL_MARK} Trunk does not work with your awk;" \ + "please report this at https://slack.trunk.io." + echo -e "Your version of awk is:" + awk --version || awk -Wversion + exit 1 +} +awk_test + +readonly CURL_FLAGS="${CURL_FLAGS:- -vvv --max-time 120 --retry 3 --fail}" +readonly WGET_FLAGS="${WGET_FLAGS:- --verbose --tries=3 --limit-rate=10M}" +TMP_DOWNLOAD_LOG="${TRUNK_TMPDIR}/download-$( + set -e + dt_str +).log" +readonly TMP_DOWNLOAD_LOG + +# Detect whether we should use wget or curl. +if command -v wget &>/dev/null; then + download_cmd() { + local url="${1}" + local output_to="${2}" + # trunk-ignore-begin(shellcheck/SC2312): we don't care if wget --version errors + cat >>"${TMP_DOWNLOAD_LOG}" <<EOF +$(wget --version 2>&1) + +EOF + # trunk-ignore-end(shellcheck/SC2312) + + # Support BusyBox wget + if wget --help 2>&1 | grep BusyBox; then + wget "${url}" -O "${output_to}" 2>>"${TMP_DOWNLOAD_LOG}" & + else + # trunk-ignore(shellcheck/SC2086): we deliberately don't quote WGET_FLAGS + wget ${WGET_FLAGS} "${url}" --output-document "${output_to}" 2>>"${TMP_DOWNLOAD_LOG}" & + fi + } +elif command -v curl &>/dev/null; then + download_cmd() { + local url="${1}" + local output_to="${2}" + # trunk-ignore-begin(shellcheck/SC2312): we don't care if curl --version errors + cat >>"${TMP_DOWNLOAD_LOG}" <<EOF +$(curl --version 2>&1) + +EOF + # trunk-ignore-end(shellcheck/SC2312) + + # trunk-ignore(shellcheck/SC2086): we deliberately don't quote CURL_FLAGS + curl ${CURL_FLAGS} "${url}" --output "${output_to}" 2>>"${TMP_DOWNLOAD_LOG}" & + } +else + download_cmd() { + echo -e "${FAIL_MARK} Cannot download '${url}'; please install curl or wget." + exit 1 + } +fi + +download_url() { + local url="${1}" + local output_to="${2}" + local progress_message="${3:-}" + + if [[ -n ${progress_message} ]]; then + echo -e "${PROGRESS_MARKS[0]} ${progress_message}..." + fi + + download_cmd "${url}" "${output_to}" + local download_pid="$!" 
+ + local i_prog=0 + while [[ -d "/proc/${download_pid}" && -n ${progress_message} ]]; do + echo -e "${CLEAR_LAST_MSG}${PROGRESS_MARKS[${i_prog}]} ${progress_message}..." + sleep 0.2 + i_prog=$(((i_prog + 1) % ${#PROGRESS_MARKS[@]})) + done + + local download_log + if ! wait "${download_pid}"; then + download_log="${TRUNK_TMPDIR}/launcher-download-$( + set -e + dt_str + ).log" + mv "${TMP_DOWNLOAD_LOG}" "${download_log}" + echo -e "${CLEAR_LAST_MSG}${FAIL_MARK} ${progress_message}... FAILED (see ${download_log})" + echo -e "Please check your connection and try again." \ + "If you continue to see this error message," \ + "consider reporting it to us at https://slack.trunk.io." + exit 1 + fi + + if [[ -n ${progress_message} ]]; then + echo -e "${CLEAR_LAST_MSG}${SUCCESS_MARK} ${progress_message}... done" + fi + +} + +# sha256sum is in coreutils, so we prefer that over shasum, which is installed with perl +if command -v sha256sum &>/dev/null; then + : +elif command -v shasum &>/dev/null; then + sha256sum() { shasum -a 256 "$@"; } +else + sha256sum() { + echo -e "${FAIL_MARK} Cannot compute sha256; please install sha256sum or shasum" + exit 1 + } +fi + +############################################################################### +# # +# CLI resolution functions # +# # +############################################################################### + +trunk_yaml_abspath() { + local repo_head + local cwd + + if repo_head=$(git rev-parse --show-toplevel 2>/dev/null); then + echo "${repo_head}/.trunk/trunk.yaml" + elif [[ -f .trunk/trunk.yaml ]]; then + cwd="$(pwd)" + echo "${cwd}/.trunk/trunk.yaml" + else + echo "" + fi +} + +read_cli_version_from() { + local config_abspath="${1}" + local cli_version + + cli_version="$( + set -e + lawk '/[ \t]+version:/{print $2; exit;}' "${config_abspath}" + )" + if [[ -z ${cli_version} ]]; then + echo -e "${FAIL_MARK} Invalid .trunk/trunk.yaml, no cli version found." \ + "See https://docs.trunk.io for more info." >&2 + exit 1 + fi + + echo "${cli_version}" +} + +download_cli() { + local dl_version="${1}" + local expected_sha256="${2}" + local actual_sha256 + + readonly TMP_INSTALL_DIR="${LAUNCHER_TMPDIR}/install" + mkdir -p "${TMP_INSTALL_DIR}" + + TRUNK_NEW_URL_VERSION=0.10.2-beta.1 + if sort --help 2>&1 | grep BusyBox; then + readonly URL="https://trunk.io/releases/${dl_version}/trunk-${dl_version}-${PLATFORM}.tar.gz" + else + if [[ "$(printf "%s\n%s\n" "${TRUNK_NEW_URL_VERSION}" "${dl_version}" | + sort --version-sort | + head -n 1 || true)" == "${TRUNK_NEW_URL_VERSION}"* ]]; then + readonly URL="https://trunk.io/releases/${dl_version}/trunk-${dl_version}-${PLATFORM}.tar.gz" + else + readonly URL="https://trunk.io/releases/trunk-${dl_version}.${KERNEL}.tar.gz" + fi + fi + + readonly DOWNLOAD_TAR_GZ="${TMP_INSTALL_DIR}/download-${dl_version}.tar.gz" + + download_url "${URL}" "${DOWNLOAD_TAR_GZ}" "Downloading Trunk ${dl_version}" + + if [[ -n ${expected_sha256:-} ]]; then + local verifying_text="Verifying Trunk sha256..." + echo -e "${PROGRESS_MARKS[0]} ${verifying_text}" + + actual_sha256="$( + set -e + sha256sum "${DOWNLOAD_TAR_GZ}" | lawk '{print $1}' + )" + + if [[ ${actual_sha256} != "${expected_sha256}" ]]; then + echo -e "${CLEAR_LAST_MSG}${FAIL_MARK} ${verifying_text} FAILED" + echo "Expected sha256: ${expected_sha256}" + echo " Actual sha256: ${actual_sha256}" + exit 1 + fi + + echo -e "${CLEAR_LAST_MSG}${SUCCESS_MARK} ${verifying_text} done" + fi + + local unpacking_text="Unpacking Trunk..." 
+ echo -e "${PROGRESS_MARKS[0]} ${unpacking_text}" + tar --strip-components=1 -C "${TMP_INSTALL_DIR}" -xf "${DOWNLOAD_TAR_GZ}" + echo -e "${CLEAR_LAST_MSG}${SUCCESS_MARK} ${unpacking_text} done" + + rm -f "${DOWNLOAD_TAR_GZ}" + mkdir -p "${TOOL_DIR}" + readonly OLD_TOOL_DIR="${CLI_DIR}/${version}" + # Create a backwards compatability link for old versions of trunk that want to write their + # crashpad_handlers to that dir. + if [[ ! -e ${OLD_TOOL_DIR} ]]; then + ln -sf "${TOOL_PART}" "${OLD_TOOL_DIR}" + fi + mv -n "${TMP_INSTALL_DIR}/trunk" "${TOOL_DIR}/" || true + rm -rf "${TMP_INSTALL_DIR}" +} + +############################################################################### +# # +# CLI resolution # +# # +############################################################################### + +CONFIG_ABSPATH="$( + set -e + trunk_yaml_abspath +)" +readonly CONFIG_ABSPATH + +version="${TRUNK_CLI_VERSION:-}" +if [[ -n ${version:-} ]]; then + : +elif [[ -f ${CONFIG_ABSPATH} ]]; then + version="$( + set -e + read_cli_version_from "${CONFIG_ABSPATH}" + )" + version_sha256="$( + set -e + lawk "/${PLATFORM_UNDERSCORE}:/"'{print $2}' "${CONFIG_ABSPATH}" + )" +else + readonly LATEST_FILE="${LAUNCHER_TMPDIR}/latest" + download_url "https://trunk.io/releases/latest" "${LATEST_FILE}" + version=$( + set -e + lawk '/version:/{print $2}' "${LATEST_FILE}" + ) + version_sha256=$( + set -e + lawk "/${PLATFORM_UNDERSCORE}:/"'{print $2}' "${LATEST_FILE}" + ) +fi + +readonly TOOL_PART="${version}-${PLATFORM}" +readonly TOOL_DIR="${CLI_DIR}/${TOOL_PART}" + +if [[ ! -e ${TOOL_DIR}/trunk ]]; then + download_cli "${version}" "${version_sha256:-}" + echo # add newline between launcher and CLI output +fi + +if [[ ${TRUNK_LAUNCHER_QUIET} != false ]]; then + exec 1>&3 3>&- 2>&4 4>&- +fi + +############################################################################### +# # +# CLI invocation # +# # +############################################################################### + +if [[ -n ${LATEST_FILE:-} ]]; then + mv -n "${LATEST_FILE}" "${TOOL_DIR}/version" >/dev/null 2>&1 || true +fi + +# NOTE: exec will overwrite the process image, so trap will not catch the exit signal. +# Therefore, run cleanup manually here. +cleanup 0 + +exec \ + env TRUNK_LAUNCHER_VERSION="${TRUNK_LAUNCHER_VERSION}" \ + env TRUNK_LAUNCHER_PATH="${BASH_SOURCE[0]}" \ + "${TOOL_DIR}/trunk" "$@"
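The assertions in tests/nodes/test_utils.py pin down the contract of _is_node_field_valid: a "node" field is valid only when it is a list containing exactly one non-empty type name. A minimal sketch consistent with those assertions (not necessarily the SDK's actual implementation in cript.nodes.util) follows.

    from typing import Any

    def _is_node_field_valid(node_type_list: Any) -> bool:
        # Valid only for a list with exactly one non-empty string, e.g. ["Project"];
        # a bare string, an empty list, [""], or multiple entries are all rejected,
        # mirroring the assertions in tests/nodes/test_utils.py.
        if not isinstance(node_type_list, list) or len(node_type_list) != 1:
            return False
        entry = node_type_list[0]
        return isinstance(entry, str) and len(entry) > 0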
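test_local_search exercises the filter semantics of find_children: a plain key/value pair must match an attribute, a nested dict must be matched by at least one child node, and a list of dicts is an AND over required children. The stand-alone sketch below illustrates those semantics over plain dictionaries only; it is not the SDK's implementation, which operates on node objects.

    def _matches(candidate: dict, condition: dict) -> bool:
        # One condition dict must be satisfied field by field.
        for key, expected in condition.items():
            actual = candidate.get(key)
            if isinstance(expected, dict):
                # Nested condition: at least one child dict under this key must match.
                children = actual if isinstance(actual, list) else [actual]
                if not any(isinstance(c, dict) and _matches(c, expected) for c in children):
                    return False
            elif isinstance(expected, list):
                # AND condition: every sub-condition must be matched by some child.
                children = actual if isinstance(actual, list) else [actual]
                if not all(any(isinstance(c, dict) and _matches(c, sub) for c in children) for sub in expected):
                    return False
            elif actual != expected and not (isinstance(actual, list) and expected in actual):
                # Scalar condition: equal, or contained in a list field such as "node".
                return False
        return True

    def find_children(node: dict, condition: dict) -> list:
        # Depth-first walk collecting every (sub)dict that satisfies the condition.
        found = [node] if _matches(node, condition) else []
        for value in node.values():
            for child in value if isinstance(value, list) else [value]:
                if isinstance(child, dict):
                    found += find_children(child, condition)
        return found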
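test_uid_serial only checks properties of locally generated uids: they carry the "_:" prefix, have a constant length, and differ from node to node. A sketch that satisfies those properties (the real get_new_uid in cript.nodes.core may differ in detail) could be:

    import uuid

    def get_new_uid() -> str:
        # "_:" marks a local, not-yet-persisted node reference; appending a fresh
        # UUID4 yields a unique string of constant length, matching the assertions
        # in test_uid_serial.
        return "_:" + str(uuid.uuid4())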
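The strip_uid_from_dict helper added in tests/util.py is what lets tests such as test_removing_nodes compare serialized nodes without tripping over randomly generated identifiers. A small usage illustration (the input dict is made up for the example):

    from util import strip_uid_from_dict  # as imported in tests/test_node_util.py

    serialized = {
        "node": ["Algorithm"],
        "uid": "_:1234",  # differs on every run
        "parameter": [{"key": "update_frequency", "uid": "_:5678"}],
    }

    # "uid"/"uuid" keys are removed recursively, so only the content is compared.
    assert strip_uid_from_dict(serialized) == {
        "node": ["Algorithm"],
        "parameter": [{"key": "update_frequency"}],
    }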
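The bundled trunk launcher downloads a release tarball and, when a checksum is pinned in .trunk/trunk.yaml, compares its sha256 against the expected value before unpacking. For readers more comfortable with Python than shell, an equivalent check (illustrative only, standard library) is:

    import hashlib

    def verify_sha256(path: str, expected_sha256: str) -> None:
        # Hash the downloaded archive in chunks and refuse to continue on mismatch,
        # mirroring the expected/actual comparison performed in download_cli.
        digest = hashlib.sha256()
        with open(path, "rb") as fh:
            for chunk in iter(lambda: fh.read(65536), b""):
                digest.update(chunk)
        actual = digest.hexdigest()
        if actual != expected_sha256:
            raise RuntimeError(f"sha256 mismatch: expected {expected_sha256}, got {actual}")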