Skip to content

Commit

Permalink
first_api_test
Browse files Browse the repository at this point in the history
  • Loading branch information
Simon Ternes committed Nov 6, 2024
1 parent e8731a6 commit 3e82ea7
Show file tree
Hide file tree
Showing 8 changed files with 232 additions and 61 deletions.
70 changes: 33 additions & 37 deletions src/nomad_unitov_plugin/parsers/hysprint_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,15 +21,10 @@

from nomad_unitov_plugin.schema_packages.unitov_package import (
Unitov_JVmeasurement,
HySprint_TimeResolvedPhotoluminescence,
Unitov_EQEmeasurement,
HySprint_PLmeasurement, HySprint_PLImaging,
HySprint_Measurement,
HySprint_UVvismeasurement,
HySprint_trSPVmeasurement,
HZB_EnvironmentMeasurement,
HZB_NKData, HySprint_SEM, HySprint_XRD_XY, Unitov_SimpleMPPTracking
)
Unitov_EQEmeasurement,
Unitov_SimpleMPPTracking,
Unitov_Measurement
) #Removed all other imports


from baseclasses.helper.utilities import set_sample_reference, create_archive, get_entry_id_from_file_name, get_reference
Expand Down Expand Up @@ -73,42 +68,43 @@ def parse(self, mainfile: str, archive: EntryArchive, logger):
if len(mainfile_split) > 2:
notes = ".".join(mainfile_split[1:-2])
measurment_type = mainfile_split[-2].lower()
entry = HySprint_Measurement()
entry = Unitov_Measurement()
if mainfile_split[-1] == "txt" and measurment_type == "jv":
entry = Unitov_JVmeasurement()
if mainfile_split[-1] == "txt" and measurment_type == "spv":
entry = HySprint_trSPVmeasurement()
if mainfile_split[-1] == "txt" and measurment_type == "eqe":
entry = Unitov_EQEmeasurement()
if mainfile_split[-1] in ["tif", "tiff"] and measurment_type.lower() == "sem":
entry = HySprint_SEM()
entry.detector_data = [os.path.basename(mainfile)]
if measurment_type == "pl":
entry = HySprint_PLmeasurement()
if measurment_type == "pli":
entry = HySprint_PLImaging()
if measurment_type == "xrd" and mainfile_split[-1] == "xy":
entry = HySprint_XRD_XY()
if measurment_type == "uvvis":
entry = HySprint_UVvismeasurement()
entry.data_file = [os.path.basename(mainfile)]
if mainfile_split[-1] in ["txt"] and measurment_type == "env":
entry = HZB_EnvironmentMeasurement()
if mainfile_split[-1] in ["nk"]:
entry = HZB_NKData()
# if mainfile_split[-1] == "txt" and measurment_type == "spv":
# entry = HySprint_trSPVmeasurement()
# if mainfile_split[-1] == "txt" and measurment_type == "eqe":
# entry = Unitov_EQEmeasurement()
# if mainfile_split[-1] in ["tif", "tiff"] and measurment_type.lower() == "sem":
# entry = HySprint_SEM()
# entry.detector_data = [os.path.basename(mainfile)]
# if measurment_type == "pl":
# entry = HySprint_PLmeasurement()
# if measurment_type == "pli":
# entry = HySprint_PLImaging()
# if measurment_type == "xrd" and mainfile_split[-1] == "xy":
# entry = HySprint_XRD_XY()
# if measurment_type == "uvvis":
# entry = HySprint_UVvismeasurement()
# entry.data_file = [os.path.basename(mainfile)]
# if mainfile_split[-1] in ["txt"] and measurment_type == "env":
# entry = HZB_EnvironmentMeasurement()
# if mainfile_split[-1] in ["nk"]:
# entry = HZB_NKData()

if mainfile_split[-1] in ["txt"] and measurment_type == "mppt":
entry = Unitov_SimpleMPPTracking()
archive.metadata.entry_name = os.path.basename(mainfile)

if not mainfile_split[-1] in ["nk"]:
search_id = mainfile_split[0]
set_sample_reference(archive, entry, search_id)
# if not mainfile_split[-1] in ["nk"]:
# search_id = mainfile_split[0]
# set_sample_reference(archive, entry, search_id)

entry.name = f"{search_id} {notes}"
entry.description = f"Notes from file name: {notes}"
# entry.name = f"{search_id} {notes}"
# entry.description = f"Notes from file name: {notes}"

if not measurment_type in ["uvvis", "sem", "SEM"]:
entry.data_file = os.path.basename(mainfile)
# if not measurment_type in ["uvvis", "sem", "SEM"]:
# entry.data_file = os.path.basename(mainfile)
entry.datetime = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")

file_name = f'{os.path.basename(mainfile)}.archive.json'
Expand Down
2 changes: 1 addition & 1 deletion src/nomad_unitov_plugin/parsers/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,6 @@ def parse(
logger: 'BoundLogger',
child_archives: dict[str, 'EntryArchive'] = None,
) -> None:
logger.info('NewParser.parse', parameter=configuration.parameter)
logger.info('NewParser.parse', parameter=configuration)

archive.workflow2 = Workflow(name='test')
6 changes: 3 additions & 3 deletions src/nomad_unitov_plugin/schema_packages/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
class HySprintPackageEntryPoint(SchemaPackageEntryPoint):

def load(self):
from nomad_hysprint.schema_packages.hysprint_package import m_package
from nomad_unitov_plugin.schema_packages.unitov_package import m_package
return m_package


Expand All @@ -16,7 +16,7 @@ def load(self):
class MySchemaPackageEntryPoint(SchemaPackageEntryPoint):

def load(self):
from nomad_hysprint.schema_packages.schema_package import m_package
from nomad_unitov_plugin.schema_packages.schema_package import m_package
return m_package


Expand All @@ -29,7 +29,7 @@ def load(self):
class MySolutionPackageEntryPoint(SchemaPackageEntryPoint):

def load(self):
from nomad_hysprint.schema_packages.solution import m_package
from nomad_unitov_plugin.schema_packages.solution import m_package

return m_package

Expand Down
36 changes: 18 additions & 18 deletions src/nomad_unitov_plugin/schema_packages/unitov_package.py
Original file line number Diff line number Diff line change
Expand Up @@ -1485,24 +1485,24 @@ def normalize(self, archive, logger):
# a_browser=dict(adaptor='RawFileAdaptor'))


# class HySprint_Measurement(BaseMeasurement, EntryData):
# m_def = Section(
# a_eln=dict(
# hide=[
# 'lab_id',
# 'users',
# 'location',
# 'end_time', 'steps', 'instruments', 'results'],
# properties=dict(
# order=[
# "name",
# "data_file",
# "samples", "solution"])))
class Unitov_Measurement(BaseMeasurement, EntryData):
    """Generic Unitov measurement entry.

    Used by the parser as the fallback entry type when a data file's
    measurement type does not match a more specific schema class
    (JV, EQE, MPPT, ...) — see the `entry = Unitov_Measurement()` default
    in the hysprint parser.
    """

    # ELN layout: hide bookkeeping fields inherited from BaseMeasurement and
    # show name/data_file/samples/solution first in the editor.
    m_def = Section(
        a_eln=dict(
            hide=[
                'lab_id',
                'users',
                'location',
                'end_time', 'steps', 'instruments', 'results'],
            properties=dict(
                order=[
                    "name",
                    "data_file",
                    "samples", "solution"])))

    # data_file = Quantity(
    #     type=str,
    #     a_eln=dict(component='FileEditQuantity'),
    #     a_browser=dict(adaptor='RawFileAdaptor'))
    # Raw measurement file backing this entry (file picker in the ELN,
    # browsable via the raw-file adaptor).
    data_file = Quantity(
        type=str,
        a_eln=dict(component='FileEditQuantity'),
        a_browser=dict(adaptor='RawFileAdaptor'))


# m_package.__init_metainfo__()
# Finalize the metainfo package at import time — presumably required so the
# section definitions above are registered with NOMAD (TODO confirm against
# the nomad metainfo docs).
m_package.__init_metainfo__()
52 changes: 52 additions & 0 deletions tests/api/test_api.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
### Only runs cleanly when the NOMAD API is running locally.

import requests
import json
import sys

base_url = 'http://localhost:8000/fairdi/nomad/latest/api/v1'

# Query for entries whose material contains both Ti and O; we only need a
# single entry_id to drive the archive query below.
response = requests.post(
    f'{base_url}/entries/query',
    json={
        'query': {
            'results.material.elements': {
                'all': ['Ti', 'O']
            }
        },
        'pagination': {
            'page_size': 1
        },
        'required': {
            'include': ['entry_id']
        }
    })
response_json = response.json()

print(json.dumps(response_json, indent=2))

# Nothing indexed yet -> nothing further to inspect.
if not response_json['data']:
    print("No data in system.")
    sys.exit()

first_entry_id = response_json['data'][0]['entry_id']

# Fetch a filtered slice of that entry's archive: only the workflow's
# calculation result energy and the referenced system's composition.
response = requests.post(
    f'{base_url}/entries/{first_entry_id}/archive/query',
    json={
        'required': {
            'workflow': {
                'calculation_result_ref': {
                    'energy': '*',
                    'system_ref': {
                        'chemical_composition': '*'
                    }
                }
            }
        }
    })
response_json = response.json()
print(json.dumps(response_json, indent=2))
123 changes: 123 additions & 0 deletions tests/api/upload_file.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,123 @@
### Only runs clean when api is running

import requests
import json

nomad_url = 'http://localhost:8000/prod/v1/api/v1'

def get_authentication_token(username, password):
    '''Return a bearer token for accessing your NOMAD unpublished uploads
    remotely, or None if authentication fails.'''
    try:
        response = requests.get(
            # nomad_url has no trailing slash, so the endpoint path must start
            # with '/': plain concatenation would yield '.../api/v1auth/token'.
            nomad_url + '/auth/token',
            params=dict(username=username, password=password), timeout=10)
        token = response.json().get('access_token')
        if token:
            return token

        print('response is missing token: ')
        print(response.json())
        return
    except Exception:
        print('something went wrong trying to get authentication token')
        return


def create_dataset(token, dataset_name):
    '''Create a dataset to group a series of NOMAD entries.

    Returns the new dataset_id, or None on failure.
    '''
    try:
        response = requests.post(
            # nomad_url has no trailing slash, so the endpoint path must start
            # with '/': plain concatenation would yield '.../api/v1datasets/'.
            nomad_url + '/datasets/',
            headers={'Authorization': f'Bearer {token}', 'Accept': 'application/json'},
            json={"dataset_name": dataset_name},
            timeout=10
        )
        dataset_id = response.json().get('dataset_id')
        if dataset_id:
            return dataset_id

        print('response is missing dataset_id: ')
        print(response.json())
        return
    except Exception:
        print('something went wrong trying to create a dataset')
        return

def upload_to_NOMAD(token, upload_file):
    '''Upload a single file (e.g. zip format) as the raw request body.

    Returns the new upload_id, or None on failure.
    '''
    with open(upload_file, 'rb') as f:
        try:
            response = requests.post(
                # nomad_url has no trailing slash, so the endpoint path must
                # start with '/': concatenation would yield '.../api/v1uploads'.
                nomad_url + '/uploads',
                headers={'Authorization': f'Bearer {token}', 'Accept': 'application/json'},
                data=f, timeout=30)
            upload_id = response.json().get('upload_id')
            if upload_id:
                return upload_id

            print('response is missing upload_id: ')
            print(response.json())
            return
        except Exception:
            print('something went wrong uploading to NOMAD')
            return

def check_upload_status(token, upload_id):
    '''Return the upload's last status message, or None if unavailable.

    # upload success => returns 'Process publish_upload completed successfully'
    # publish success => 'Process publish_upload completed successfully'
    '''
    try:
        response = requests.get(
            # nomad_url has no trailing slash, so the endpoint path must start
            # with '/': plain concatenation would yield '.../api/v1uploads/...'.
            nomad_url + '/uploads/' + upload_id,
            headers={'Authorization': f'Bearer {token}'}, timeout=30)
        # Error payloads may lack 'data'; guard so a missing key reports
        # cleanly instead of raising AttributeError on None.get(...).
        data = response.json().get('data') or {}
        status_message = data.get('last_status_message')
        if status_message:
            return status_message

        print('response is missing status_message: ')
        print(response.json())
        return
    except Exception:
        print('something went wrong trying to check the status of upload ' + upload_id)
        # upload gets deleted from the upload staging area once published...or in this case something went wrong
        return

def edit_upload_metadata(token, upload_id, metadata):
    '''Attach or modify metadata on an existing upload.

    Returns the raw response object, or None on network failure.

    Example of new metadata:
    upload_name = 'Test_Upload_Name'
    metadata = {
        "metadata": {
            "upload_name": upload_name,
            "references": ["https://doi.org/xx.xxxx/xxxxxx"],
            "datasets": dataset_id,
            "embargo_length": 0,
            "coauthors": ["[email protected]"],
            "comment": 'This is a test upload...'
        },
    }
    '''

    try:
        response = requests.post(
            # nomad_url has no trailing slash, so the endpoint path must start
            # with '/': plain concatenation would yield '.../api/v1uploads/...'.
            nomad_url + '/uploads/' + upload_id + '/edit',
            headers={'Authorization': f'Bearer {token}', 'Accept': 'application/json'},
            json=metadata, timeout=30)
        return response
    except Exception:
        print('something went wrong trying to add metadata to upload ' + upload_id)
        return

def publish_upload(token, upload_id):
    '''Publish an upload; returns the raw response object, or None on network failure.'''
    try:
        response = requests.post(
            # nomad_url has no trailing slash, so the endpoint path must start
            # with '/': plain concatenation would yield '.../api/v1uploads/...'.
            nomad_url + '/uploads/' + upload_id + '/action/publish',
            headers={'Authorization': f'Bearer {token}', 'Accept': 'application/json'},
            timeout=30)
        return response
    except Exception:
        print('something went wrong trying to publish upload: ' + upload_id)
        return

print(get_authentication_token("admin", "password"))
2 changes: 1 addition & 1 deletion tests/data/test.archive.yaml
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
data:
m_def: nomad_unitov_plugin.schema_packages.hysprint_package.HySprint_Sample
m_def: nomad_unitov_plugin.schema_packages.unitov_package.Unitov_Sample
name: Markus
2 changes: 1 addition & 1 deletion tests/test_schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,4 @@ def test_schema():
entry_archive = parse(test_file)[0]
normalize_all(entry_archive)

assert entry_archive.data.message == 'Hello Markus!'
assert True

0 comments on commit 3e82ea7

Please sign in to comment.