[Don't merge] CI test #1951

Closed
wants to merge 11 commits
2 changes: 1 addition & 1 deletion .github/workflows/requirements.yml
@@ -39,7 +39,7 @@ jobs:
run: python -m pip install pip-check-reqs

- name: Check extra core requirements
run: pip-extra-reqs -r werkzeug -r python-multipart covalent covalent_dispatcher covalent_ui --ignore-requirement=qiskit --ignore-requirement=qiskit-ibm-provider --ignore-requirement=amazon-braket-pennylane-plugin
run: pip-extra-reqs -r python-multipart covalent covalent_dispatcher covalent_ui --ignore-requirement=qiskit --ignore-requirement=qiskit-ibm-provider --ignore-requirement=amazon-braket-pennylane-plugin

- name: Check missing SDK requirements
run: >
11 changes: 11 additions & 0 deletions CHANGELOG.md
@@ -11,6 +11,17 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

- Updated Slurm plugin docs to note possible SSH limitation
- Updated Slurm plugin docs to remove `sshproxy` section
- API base endpoint is now configurable from an environment variable
- Removed unused lattice attributes to reduce asset uploads

### Fixed

- Improved handling of Covalent version mismatches between client and
executor environments

### Operations

- Allow installing a specific commit SHA to ease testing

## [0.234.1-rc.0] - 2024-05-10

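The changelog entry above about the configurable API base endpoint corresponds to the `BASE_ENDPOINT` change in `covalent/_dispatcher_plugins/local.py` further down. A minimal usage sketch, assuming the module path matches the file path shown in this diff; the override value here is hypothetical:

```python
import os

# Hypothetical override; the default is "/api/v2/dispatches".
os.environ["COVALENT_DISPATCH_BASE_ENDPOINT"] = "/api/v2/custom-dispatches"

# BASE_ENDPOINT is read once at import time, so the environment variable
# must be set before the dispatcher module is imported.
from covalent._dispatcher_plugins.local import BASE_ENDPOINT

print(BASE_ENDPOINT)  # /api/v2/custom-dispatches
```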
10 changes: 5 additions & 5 deletions covalent/_api/apiclient.py
@@ -33,7 +33,7 @@ def __init__(self, dispatcher_addr: str, adapter: HTTPAdapter = None, auto_raise
self.adapter = adapter
self.auto_raise = auto_raise

def prepare_headers(self, **kwargs):
def prepare_headers(self, kwargs):
extra_headers = CovalentAPIClient.get_extra_headers()
headers = kwargs.get("headers", {})
if headers:
@@ -42,7 +42,7 @@ def prepare_headers(self, **kwargs):
return headers

def get(self, endpoint: str, **kwargs):
headers = self.prepare_headers(**kwargs)
headers = self.prepare_headers(kwargs)
url = self.dispatcher_addr + endpoint
try:
with requests.Session() as session:
@@ -62,7 +62,7 @@ def get(self, endpoint: str, **kwargs):
return r

def put(self, endpoint: str, **kwargs):
headers = self.prepare_headers()
headers = self.prepare_headers(kwargs)
url = self.dispatcher_addr + endpoint
try:
with requests.Session() as session:
@@ -81,7 +81,7 @@ def put(self, endpoint: str, **kwargs):
return r

def post(self, endpoint: str, **kwargs):
headers = self.prepare_headers()
headers = self.prepare_headers(kwargs)
url = self.dispatcher_addr + endpoint
try:
with requests.Session() as session:
@@ -100,7 +100,7 @@ def post(self, endpoint: str, **kwargs):
return r

def delete(self, endpoint: str, **kwargs):
headers = self.prepare_headers()
headers = self.prepare_headers(kwargs)
url = self.dispatcher_addr + endpoint
try:
with requests.Session() as session:
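A condensed sketch of what the `apiclient.py` change above does, with simplified names (`ClientSketch` is not the real `CovalentAPIClient`, and the elided parts of `prepare_headers` are not reproduced): before this PR, `put`, `post`, and `delete` called `prepare_headers()` with no arguments, so caller-supplied `headers=` kwargs were silently dropped; passing the kwargs dict through lets every verb merge per-call headers with the client-wide extras.

```python
from typing import Optional

import requests


class ClientSketch:
    """Simplified stand-in for CovalentAPIClient (illustration only)."""

    def __init__(self, dispatcher_addr: str, extra_headers: Optional[dict] = None):
        self.dispatcher_addr = dispatcher_addr
        self.extra_headers = extra_headers or {}

    def prepare_headers(self, kwargs: dict) -> dict:
        # Pop caller-supplied headers out of kwargs so they are not passed to
        # requests twice, then merge in the client-wide extras.
        headers = kwargs.pop("headers", {})
        headers.update(self.extra_headers)
        return headers

    def put(self, endpoint: str, **kwargs) -> requests.Response:
        headers = self.prepare_headers(kwargs)  # was prepare_headers() before this PR
        with requests.Session() as session:
            return session.put(self.dispatcher_addr + endpoint, headers=headers, **kwargs)
```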
16 changes: 13 additions & 3 deletions covalent/_dispatcher_plugins/local.py
@@ -14,6 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import tempfile
from copy import deepcopy
from functools import wraps
@@ -47,6 +48,9 @@
dispatch_cache_dir.mkdir(parents=True, exist_ok=True)


BASE_ENDPOINT = os.getenv("COVALENT_DISPATCH_BASE_ENDPOINT", "/api/v2/dispatches")


def get_redispatch_request_body_v2(
dispatch_id: str,
staging_dir: str,
@@ -540,10 +544,10 @@ def register_manifest(
dispatcher_addr = format_server_url()

stripped = strip_local_uris(manifest) if push_assets else manifest
endpoint = "/api/v2/dispatches"
endpoint = BASE_ENDPOINT

if parent_dispatch_id:
endpoint = f"{endpoint}/{parent_dispatch_id}/subdispatches"
endpoint = f"{BASE_ENDPOINT}/{parent_dispatch_id}/sublattices"

r = APIClient(dispatcher_addr).post(endpoint, data=stripped.model_dump_json())
r.raise_for_status()
@@ -615,6 +619,7 @@ def _upload_asset(local_uri, remote_uri):
else:
local_path = local_uri

filesize = os.path.getsize(local_path)
with open(local_path, "rb") as reader:
app_log.debug(f"uploading to {remote_uri}")
f = furl(remote_uri)
@@ -624,6 +629,11 @@
dispatcher_addr = f"{scheme}://{host}:{port}"
endpoint = str(f.path)
api_client = APIClient(dispatcher_addr)
if f.query:
endpoint = f"{endpoint}?{f.query}"

# Workaround for Requests bug when streaming from empty files
data = reader.read() if filesize < 50 else reader

r = api_client.put(endpoint, data=reader)
r = api_client.put(endpoint, headers={"Content-Length": str(filesize)}, data=data)
r.raise_for_status()
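The `_upload_asset` change above can be read as the standalone sketch below (`put_asset` is a hypothetical helper, not part of the Covalent API): payloads smaller than 50 bytes are read into memory because requests can mishandle streaming uploads from empty or near-empty file objects, and `Content-Length` is sent explicitly either way.

```python
import os

import requests


def put_asset(url: str, local_path: str) -> requests.Response:
    """Upload a local file, buffering very small payloads in memory."""
    filesize = os.path.getsize(local_path)
    with open(local_path, "rb") as reader:
        # Mirror of the workaround above: stream large files, but send small
        # ones as plain bytes so an empty stream never reaches requests.
        data = reader.read() if filesize < 50 else reader
        resp = requests.put(url, headers={"Content-Length": str(filesize)}, data=data)
    resp.raise_for_status()
    return resp
```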
33 changes: 1 addition & 32 deletions covalent/_results_manager/result.py
@@ -18,7 +18,7 @@
import os
import re
from datetime import datetime
from typing import TYPE_CHECKING, Any, Dict, List, Set, Union
from typing import TYPE_CHECKING, Any, Dict, List, Union

from .._shared_files import logger
from .._shared_files.config import get_config
@@ -516,34 +516,3 @@ def _convert_to_electron_result(self) -> Any:
"""

return self._result


def _filter_cova_decorators(function_string: str, cova_imports: Set[str]) -> str:
"""
Given a string representing a function, comment out any Covalent-related decorators.

Args
function_string: A string representation of a workflow function.

Returns:
The function string with Covalent-related decorators commented out.
"""

has_cova_decorator = False
in_decorator = 0
function_lines = function_string.split("\n")
for i in range(len(function_lines)):
line = function_lines[i].strip()
if in_decorator > 0:
function_lines[i] = f"# {function_lines[i]}"
in_decorator += line.count("(")
in_decorator -= line.count(")")
elif line.startswith("@"):
decorator_name = line.split("@")[1].split(".")[0].split("(")[0]
if decorator_name in cova_imports:
function_lines[i] = f"# {function_lines[i]}"
has_cova_decorator = True
in_decorator += line.count("(")
in_decorator -= line.count(")")

return "\n".join(function_lines) if has_cova_decorator else function_string
4 changes: 2 additions & 2 deletions covalent/_serialize/electron.py
@@ -210,8 +210,8 @@ def _get_node_custom_assets(node_attrs: dict) -> Dict[str, AssetSchema]:
def serialize_node(node_id: int, node_attrs: dict, node_storage_path) -> ElectronSchema:
meta = _serialize_node_metadata(node_attrs, node_storage_path)
assets = _serialize_node_assets(node_attrs, node_storage_path)
custom_assets = _get_node_custom_assets(node_attrs)
return ElectronSchema(id=node_id, metadata=meta, assets=assets, custom_assets=custom_assets)
assets._custom = _get_node_custom_assets(node_attrs)
return ElectronSchema(id=node_id, metadata=meta, assets=assets)


def deserialize_node(e: ElectronSchema, metadata_only: bool = False) -> dict:
49 changes: 2 additions & 47 deletions covalent/_serialize/lattice.py
@@ -40,10 +40,6 @@
"workflow_function_string": AssetType.TEXT,
"doc": AssetType.TEXT,
"inputs": AssetType.TRANSPORTABLE,
"named_args": AssetType.TRANSPORTABLE,
"named_kwargs": AssetType.TRANSPORTABLE,
"cova_imports": AssetType.JSONABLE,
"lattice_imports": AssetType.TEXT,
"hooks": AssetType.JSONABLE,
}

@@ -112,33 +108,6 @@ def _serialize_lattice_assets(lat, storage_path: str) -> LatticeAssets:
lat.inputs, ASSET_TYPES["inputs"], storage_path, ASSET_FILENAME_MAP["inputs"]
)

# Deprecate
named_args_asset = save_asset(
lat.named_args,
ASSET_TYPES["named_args"],
storage_path,
ASSET_FILENAME_MAP["named_args"],
)
named_kwargs_asset = save_asset(
lat.named_kwargs,
ASSET_TYPES["named_kwargs"],
storage_path,
ASSET_FILENAME_MAP["named_kwargs"],
)
cova_imports_asset = save_asset(
lat.cova_imports,
ASSET_TYPES["cova_imports"],
storage_path,
ASSET_FILENAME_MAP["cova_imports"],
)
lattice_imports_asset = save_asset(
lat.lattice_imports,
ASSET_TYPES["lattice_imports"],
storage_path,
ASSET_FILENAME_MAP["lattice_imports"],
)

# NOTE: these are actually JSONable
hooks_asset = save_asset(
lat.metadata["hooks"],
ASSET_TYPES["hooks"],
@@ -151,10 +120,6 @@ def _serialize_lattice_assets(lat, storage_path: str) -> LatticeAssets:
workflow_function_string=workflow_func_str_asset,
doc=docstring_asset,
inputs=inputs_asset,
named_args=named_args_asset,
named_kwargs=named_kwargs_asset,
cova_imports=cova_imports_asset,
lattice_imports=lattice_imports_asset,
hooks=hooks_asset,
)

@@ -166,20 +131,12 @@ def _deserialize_lattice_assets(assets: LatticeAssets) -> dict:
)
doc = load_asset(assets.doc, ASSET_TYPES["doc"])
inputs = load_asset(assets.inputs, ASSET_TYPES["inputs"])
named_args = load_asset(assets.named_args, ASSET_TYPES["named_args"])
named_kwargs = load_asset(assets.named_kwargs, ASSET_TYPES["named_kwargs"])
cova_imports = load_asset(assets.cova_imports, ASSET_TYPES["cova_imports"])
lattice_imports = load_asset(assets.lattice_imports, ASSET_TYPES["lattice_imports"])
hooks = load_asset(assets.hooks, ASSET_TYPES["hooks"])
return {
"workflow_function": workflow_function,
"workflow_function_string": workflow_function_string,
"__doc__": doc,
"inputs": inputs,
"named_args": named_args,
"named_kwargs": named_kwargs,
"cova_imports": cova_imports,
"lattice_imports": lattice_imports,
"metadata": {
"hooks": hooks,
},
@@ -194,12 +151,10 @@ def _get_lattice_custom_assets(lat: Lattice) -> Dict[str, AssetSchema]:
def serialize_lattice(lat, storage_path: str) -> LatticeSchema:
meta = _serialize_lattice_metadata(lat)
assets = _serialize_lattice_assets(lat, storage_path)
custom_assets = _get_lattice_custom_assets(lat)
assets._custom = _get_lattice_custom_assets(lat)
tg = serialize_transport_graph(lat.transport_graph, storage_path)

return LatticeSchema(
metadata=meta, assets=assets, custom_assets=custom_assets, transport_graph=tg
)
return LatticeSchema(metadata=meta, assets=assets, transport_graph=tg)


def deserialize_lattice(model: LatticeSchema) -> Lattice:
15 changes: 5 additions & 10 deletions covalent/_shared_files/schemas/electron.py
@@ -19,7 +19,7 @@
from datetime import datetime
from typing import Dict, Optional

from pydantic import BaseModel, field_validator
from pydantic import BaseModel

from .asset import AssetSchema
from .common import StatusEnum
@@ -91,6 +91,8 @@ class ElectronAssets(BaseModel):
# user dependent assets
hooks: AssetSchema

_custom: Optional[Dict[str, AssetSchema]] = None


class ElectronMetadata(BaseModel):
task_group_id: int
@@ -103,6 +105,8 @@ class ElectronMetadata(BaseModel):
start_time: Optional[datetime] = None
end_time: Optional[datetime] = None

_custom: Optional[Dict] = None

# For use by redispatch
def reset(self):
self.status = StatusEnum.NEW_OBJECT
@@ -114,12 +118,3 @@ class ElectronSchema(BaseModel):
id: int
metadata: ElectronMetadata
assets: ElectronAssets
custom_assets: Optional[Dict[str, AssetSchema]] = None

@field_validator("custom_assets")
def check_custom_asset_keys(cls, v):
if v is not None:
for key in v:
if key in ASSET_FILENAME_MAP:
raise ValueError(f"Asset {key} conflicts with built-in key")
return v
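The `_custom` attributes added in this file and in `lattice.py` rely on pydantic v2 treating underscore-prefixed annotated names as private attributes: they are excluded from validation and from `model_dump()`/`model_dump_json()`, yet can still be assigned after construction, which is what `serialize_node()` and `serialize_lattice()` now do with `assets._custom`. A minimal sketch with a simplified stand-in for `AssetSchema`, not Covalent code:

```python
from typing import Dict, Optional

from pydantic import BaseModel


class AssetSchema(BaseModel):
    # Simplified stand-in for the real AssetSchema.
    size: Optional[int] = None


class ElectronAssets(BaseModel):
    hooks: AssetSchema
    _custom: Optional[Dict[str, AssetSchema]] = None  # private attribute


assets = ElectronAssets(hooks=AssetSchema(size=0))
assets._custom = {"extra_file": AssetSchema(size=128)}  # hypothetical custom asset

print(assets.model_dump())  # {'hooks': {'size': 0}} -- no `_custom` key in the payload
print(assets._custom["extra_file"].size)  # 128, still available in process
```

One apparent consequence of this design is that custom asset entries are carried in process rather than in the serialized schema, which is consistent with dropping the `custom_assets` field and its key-conflict validator.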
29 changes: 11 additions & 18 deletions covalent/_shared_files/schemas/lattice.py
@@ -18,7 +18,7 @@

from typing import Dict, Optional

from pydantic import BaseModel, field_validator
from pydantic import BaseModel

from .asset import AssetSchema
from .transport_graph import TransportGraphSchema
@@ -39,10 +39,6 @@
"workflow_function_string",
"__doc__",
"inputs",
"named_args",
"named_kwargs",
"cova_imports",
"lattice_imports",
# user dependent assets
"hooks",
}
@@ -83,14 +79,18 @@ class LatticeAssets(BaseModel):
workflow_function_string: AssetSchema
doc: AssetSchema # __doc__
inputs: AssetSchema
named_args: AssetSchema
named_kwargs: AssetSchema
cova_imports: AssetSchema
lattice_imports: AssetSchema

# Deprecated
named_args: AssetSchema = AssetSchema(size=0)
named_kwargs: AssetSchema = AssetSchema(size=0)
cova_imports: AssetSchema = AssetSchema(size=0)
lattice_imports: AssetSchema = AssetSchema(size=0)

# lattice.metadata
hooks: AssetSchema

_custom: Optional[Dict[str, AssetSchema]] = None


class LatticeMetadata(BaseModel):
name: str # __name__
@@ -101,18 +101,11 @@ class LatticeMetadata(BaseModel):
python_version: Optional[str] = None
covalent_version: Optional[str] = None

_custom: Optional[Dict] = None


class LatticeSchema(BaseModel):
metadata: LatticeMetadata
assets: LatticeAssets
custom_assets: Optional[Dict[str, AssetSchema]] = None

transport_graph: TransportGraphSchema

@field_validator("custom_assets")
def check_custom_asset_keys(cls, v):
if v is not None:
for key in v:
if key in ASSET_FILENAME_MAP:
raise ValueError(f"Asset {key} conflicts with built-in key")
return v
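Because the deprecated lattice asset fields now carry `AssetSchema(size=0)` defaults, a manifest that omits them should still validate while older manifests that include them remain accepted. A small sketch, assuming the module paths match the file layout shown in this diff and that `LatticeAssets` has no required fields beyond those visible here:

```python
from covalent._shared_files.schemas.asset import AssetSchema
from covalent._shared_files.schemas.lattice import LatticeAssets

# Build the assets block without the deprecated entries; the new defaults
# fill in named_args, named_kwargs, cova_imports, and lattice_imports.
assets = LatticeAssets(
    workflow_function=AssetSchema(size=0),
    workflow_function_string=AssetSchema(size=0),
    doc=AssetSchema(size=0),
    inputs=AssetSchema(size=0),
    hooks=AssetSchema(size=0),
)

assert assets.named_args.size == 0  # deprecated field populated by its default
```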