Update OpenAgua, HydroShare, and WEAP connections
Adel M. Abdallah committed Jan 17, 2021
1 parent 1acc066 · commit f8f5a83
Showing 59 changed files with 4,851 additions and 1,331 deletions.
@@ -0,0 +1,140 @@
import pandas as pd
import sqlite3
import json

# Choose the WaMDaM SQLite database to query before connecting.
WaMDaM_SQLite_Name = 'WEAP.sqlite'
# WaMDaM_SQLite_Name = 'replicateWaMDaM.sqlite'
WaMDaM_SQLite_Name = 'BearRiverDatasets_August_2018.sqlite'

conn = sqlite3.connect(WaMDaM_SQLite_Name)

sql_Multi_colums = """
SELECT DISTINCT "ObjectTypes"."ObjectType",
"Instances"."InstanceName",
ScenarioName, "Attributes"."AttributeName" AS Multi_AttributeName,
Methods.MethodName, Sources.SourceName,
"AttributesColumns"."AttributeName" AS "Sub_AttributeName",
"DataValue", "ValueOrder"

FROM "ResourceTypes"

-- Join the ResourceTypes to get their ObjectTypes
LEFT JOIN "ObjectTypes"
ON "ObjectTypes"."ResourceTypeID"="ResourceTypes"."ResourceTypeID"

-- Join the ObjectTypes to get their Attributes
LEFT JOIN "Attributes"
ON "Attributes"."ObjectTypeID"="ObjectTypes"."ObjectTypeID"

-- Join the Attributes to get their Mappings
LEFT JOIN "Mappings"
ON Mappings.AttributeID=Attributes.AttributeID

-- Join the Mappings to get their Instances
LEFT JOIN "Instances"
ON "Instances"."InstanceID"="Mappings"."InstanceID"

-- Join the Mappings to get their ScenarioMappings
LEFT JOIN "ScenarioMappings"
ON "ScenarioMappings"."MappingID"="Mappings"."MappingID"

-- Join the ScenarioMappings to get their Scenarios
LEFT JOIN "Scenarios"
ON "Scenarios"."ScenarioID"="ScenarioMappings"."ScenarioID"

-- Join the Scenarios to get their MasterNetworks
LEFT JOIN "MasterNetworks"
ON "MasterNetworks"."MasterNetworkID"="Scenarios"."MasterNetworkID"

-- Join the Mappings to get their Methods
LEFT JOIN "Methods"
ON "Methods"."MethodID"="Mappings"."MethodID"

-- Join the Mappings to get their Sources
LEFT JOIN "Sources"
ON "Sources"."SourceID"="Mappings"."SourceID"

-- Join the Mappings to get their ValuesMapper
LEFT JOIN "ValuesMapper"
ON "ValuesMapper"."ValuesMapperID"="Mappings"."ValuesMapperID"

-- Join the ValuesMapper to get their MultiAttributeSeries
LEFT JOIN "MultiAttributeSeries"
ON "MultiAttributeSeries"."ValuesMapperID"="ValuesMapper"."ValuesMapperID"

/* These extra joins reach each column name within the multi-column array */
-- Join the MultiAttributeSeries to get their specific ValuesMapper, now called ValuesMapperColumn
LEFT JOIN "ValuesMapper" AS "ValuesMapperColumn"
ON "ValuesMapperColumn"."ValuesMapperID"="MultiAttributeSeries"."MappingID_Attribute"

-- Join the ValuesMapperColumn to get back to their specific Mapping, now called MappingColumns
LEFT JOIN "Mappings" AS "MappingColumns"
ON "MappingColumns"."ValuesMapperID"="ValuesMapperColumn"."ValuesMapperID"

-- Join the MappingColumns to get back to their specific Attribute, now called AttributesColumns
LEFT JOIN "Attributes" AS "AttributesColumns"
ON "AttributesColumns"."AttributeID"="MappingColumns"."AttributeID"
/* Finishes here */

-- Join the MultiAttributeSeries to get access to their MultiAttributeSeriesValues
LEFT JOIN "MultiAttributeSeriesValues"
ON "MultiAttributeSeriesValues"."MultiAttributeSeriesID"="MultiAttributeSeries"."MultiAttributeSeriesID"

-- Restrict the query to one network and scenario and to attributes whose AttributeDataTypeCV is MultiAttributeSeries
WHERE Attributes.AttributeDataTypeCV='MultiAttributeSeries'
AND DataValue IS NOT NULL AND DataValue <> ''
--AND Multi_AttributeName='wsi_par'
AND "ResourceTypeAcronym"='WASH'
AND "MasterNetworkName"='Lower Bear River Network'
AND "ScenarioName"='base case scenario 2003'

ORDER BY InstanceName, ScenarioName, Multi_AttributeName, Sub_AttributeName, ValueOrder ASC
"""

Multi_colums_result_df = pd.read_sql(sql_Multi_colums, conn)

# Each group holds one multi-column attribute for one instance of one object type.
subsets = Multi_colums_result_df.groupby(['ObjectType', 'InstanceName', 'Multi_AttributeName'])

for subset in subsets.groups.keys():
    dt = subsets.get_group(name=subset)

    ObjectType = dt['ObjectType'].values[0]
    InstanceName = dt['InstanceName'].values[0]
    attr_name = dt['Multi_AttributeName'].values[0]   # the multi-column attribute
    AttributeNameNos = dt['Sub_AttributeName']        # its sub-attribute (column) names
    # The sub-attribute values themselves arrive in the DataValue column,
    # ordered by ValueOrder, rather than in per-attribute columns.

    # dict_res_attr, multiAttr_sheet_bottom_df, k, Dataset_attr_Name_Dim_list,
    # attr_unit, metadata, array_value, and list_rs_multi are defined elsewhere
    # in the full OpenAgua importer; this excerpt assumes they already exist.
    if (InstanceName, attr_name) in dict_res_attr.keys():
        Attr_name = multiAttr_sheet_bottom_df.values[k][3]
        ObjectType = multiAttr_sheet_bottom_df.values[k][0]
        dimension = Dataset_attr_Name_Dim_list[ObjectType, Attr_name]

        rs_multi = {'resource_attr_id': dict_res_attr[(multiAttr_sheet_bottom_df.values[k][1],
                                                       multiAttr_sheet_bottom_df.values[k][3])]['id']}

        dataset = {'type': 'array',
                   'name': multiAttr_sheet_bottom_df.values[k][3],
                   'unit': attr_unit,
                   'dimension': dimension,
                   'metadata': json.dumps(metadata, ensure_ascii=True),
                   'hidden': 'N',
                   'value': json.dumps(array_value)}
        rs_multi['value'] = dataset
        list_rs_multi.append(rs_multi)
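Inside the loop above, each group's DataValue rows can be reshaped into one column per Sub_AttributeName before being serialized as the OpenAgua array. A minimal sketch using only columns the query returns; the pivot itself is not in the original file:

# Sketch: reshape a group's values into a 2-D array, one column per
# Sub_AttributeName, with rows ordered by ValueOrder.
pivoted = dt.pivot_table(index='ValueOrder',
                         columns='Sub_AttributeName',
                         values='DataValue',
                         aggfunc='first')
array_value = pivoted.values.tolist()   # nested list, ready for json.dumps()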
@@ -5,18 +5,26 @@
 """"""

 """
-WaMDaM: The Water Management Data Model Version 1.06
+WaMDaM: The Water Management Data Model Version 1.08
 See the schema at http://schema.wamdam.org/diagrams/01_WaMDaM.html
 Instructions at http://docs.wamdam.org/Getting_started/Steps
 Adel M. Abdallah
 email: [email protected]
 website: http://adelmabdallah.com/
-Feb 2019
+Dec 2020
 """

+'''
+To create a new .exe, type this in the terminal
+(TODO: pin the specific PyInstaller version used):
+pyinstaller --clean --icon=WaMDaM_Wizard.ico --onedir --noconfirm --noupx --onefile --windowed wamdam.py
+'''

 """
 Wamdam.py is the calling/initializing function for the Wizard.

@@ -41,6 +49,7 @@
 dlg_ImportSpreadsheetBasic.py, which exists in the viewer folder. Then, when the user
 clicks "Load Data" in this dialog box, a function calls one or all of the sub-data importers
 for the checked boxes from the controller folder (a dispatch sketch follows this diff):
+stp0_loadCVs.py, stp1_loadMetadata.py,
 stp2_loadDataStructure.py,
 stp3_loadNetworks.py,
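The docstring above names the step importers the import dialog can call. A minimal sketch of that dispatch: only the stpX module names come from the docstring; load_data() and the per-module load() entry points are assumptions, not the Wizard's actual code.

# Sketch of the dispatch described above; function names are illustrative.
from controller import (stp0_loadCVs, stp1_loadMetadata,
                        stp2_loadDataStructure, stp3_loadNetworks)

def load_data(checked_steps, workbook_path, db_session):
    """Run the sub-importers whose checkboxes were ticked in the dialog."""
    importers = [
        ('cvs', stp0_loadCVs.load),
        ('metadata', stp1_loadMetadata.load),
        ('data_structure', stp2_loadDataStructure.load),
        ('networks', stp3_loadNetworks.load),
    ]
    for step_name, importer in importers:
        if checked_steps.get(step_name):
            importer(workbook_path, db_session)   # run only the checked steps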
@@ -1,32 +1,65 @@
-import plotly
-from plotly.graph_objs import *
-import os
-import csv
-from collections import OrderedDict
-import sqlite3
-import pandas as pd
-import numpy as np
-from IPython.display import display, Image, SVG, Math, YouTubeVideo
-import urllib
-
-print 'The needed Python libraries have been imported'
-
-username = 'amabdallah'
-password = 'MyHydroShareWorld'
-
-auth = HydroShareAuthBasic(username=username, password=password)
-
-hs = HydroShare(auth=auth)
-print hs
-for resource in hs.resources():
-    print resource
+import sqlite3
+import numpy as np
+import pandas as pd
+import getpass
+from hs_restclient import HydroShare, HydroShareAuthBasic
+import os
+import json
+
+username = 'amabdallah'
+# username = 'adelabdallah'
+password = ''
+
+auth = HydroShareAuthBasic(username=username, password=password)
+hs = HydroShare(auth=auth)
+print 'Connected to HydroShare'
+
+resource_type = 'CompositeResource'
+title = 'new tes5255t'
+abstract = 'My abstract'
+keywords = ('my keyword 1', 'my keyword 2')
+fpath = 'C:\Users\Rosenberg\Desktop\Ecosystem/WASH3.sqlite'
+
+metadata = '[{"coverage":{"type":"period", "value":{"start":"01/01/2000", "end":"12/12/2010"}}}, {"creator":{"name":"John Smith"}}, {"creator":{"name":"Lisa Miller"}}]'
+
+resource_id = hs.createResource(resource_type, title, resource_file=fpath,
+                                keywords=keywords, abstract=abstract,
+                                metadata=metadata)
+
+SqliteName = 'WASH3.sqlite'
+
+params = {}
+params['title'] = SqliteName
+params['temporal_coverage'] = {"start": '2000', "end": '2003'}
+params['spatial_coverage'] = {
+    "type": "box",
+    "units": "Decimal degrees",
+    "eastlimit": float(-110.8200),
+    "northlimit": float(42.8480),
+    "southlimit": float(40.7120),
+    "westlimit": float(-113.0000),
+    "name": "12232",
+    "projection": "WGS 84 EPSG:4326"
+}
+
+# Register the SQLite file as a single-file aggregate, then attach the metadata
+options = {"file_path": SqliteName, "hs_file_type": "SingleFile"}
+result = hs.resource(resource_id).functions.set_file_type(options)
+
+spatial = hs.resource(resource_id).files.metadata(SqliteName, params)
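The new version imports getpass but still hard-codes an empty password. A small sketch of prompting for credentials instead, in the same Python 2 style as the rest of the file:

# Prompt for HydroShare credentials rather than hard-coding them (sketch).
import getpass
from hs_restclient import HydroShare, HydroShareAuthBasic

username = raw_input('HydroShare username: ')
password = getpass.getpass('HydroShare password: ')
hs = HydroShare(auth=HydroShareAuthBasic(username=username, password=password))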