From 2e94a3ff22fccb814d8dfad8c3105191e2518b39 Mon Sep 17 00:00:00 2001 From: natthan-pigoux Date: Wed, 8 Nov 2023 09:30:29 +0100 Subject: [PATCH 1/2] Add DB,routes and tests for (un)assigning sandboxes to jobs --- diracx-core/src/diracx/core/models.py | 5 + .../src/diracx/db/sql/jobs/status_utility.py | 2 +- .../src/diracx/db/sql/sandbox_metadata/db.py | 97 ++++++++++++++++--- diracx-db/tests/test_sandbox_metadata.py | 87 ++++++++++++++++- .../diracx/routers/job_manager/sandboxes.py | 77 ++++++++++++++- diracx-routers/tests/jobs/test_sandboxes.py | 92 +++++++++++++++++- 6 files changed, 340 insertions(+), 20 deletions(-) diff --git a/diracx-core/src/diracx/core/models.py b/diracx-core/src/diracx/core/models.py index 3aae477c..243068bc 100644 --- a/diracx-core/src/diracx/core/models.py +++ b/diracx-core/src/diracx/core/models.py @@ -120,3 +120,8 @@ class SandboxInfo(BaseModel): checksum: str = Field(pattern=r"^[0-f]{64}$") size: int = Field(ge=1) format: SandboxFormat + + +class SandboxType(StrEnum): + Input: str = "Input" + Output: str = "Output" diff --git a/diracx-db/src/diracx/db/sql/jobs/status_utility.py b/diracx-db/src/diracx/db/sql/jobs/status_utility.py index 09b00edf..0451cc6b 100644 --- a/diracx-db/src/diracx/db/sql/jobs/status_utility.py +++ b/diracx-db/src/diracx/db/sql/jobs/status_utility.py @@ -272,7 +272,7 @@ async def remove_jobs( # TODO: this was also not done in the JobManagerHandler, but it was done in the JobCleaningAgent # I think it should be done here as well - await sandbox_metadata_db.unassign_sandbox_from_jobs(job_ids) + await sandbox_metadata_db.unassign_sandboxes_to_jobs(job_ids) # Remove the job from TaskQueueDB await _remove_jobs_from_task_queue(job_ids, config, task_queue_db, background_task) diff --git a/diracx-db/src/diracx/db/sql/sandbox_metadata/db.py b/diracx-db/src/diracx/db/sql/sandbox_metadata/db.py index e7715d1e..47a9e79e 100644 --- a/diracx-db/src/diracx/db/sql/sandbox_metadata/db.py +++ b/diracx-db/src/diracx/db/sql/sandbox_metadata/db.py @@ -1,9 +1,10 @@ from __future__ import annotations +from typing import Any + import sqlalchemy -from sqlalchemy import delete -from diracx.core.models import SandboxInfo, UserInfo +from diracx.core.models import SandboxInfo, SandboxType, UserInfo from diracx.db.sql.utils import BaseSQLDB, utcnow from .schema import Base as SandboxMetadataDBBase @@ -76,7 +77,7 @@ async def update_sandbox_last_access_time(self, se_name: str, pfn: str) -> None: result = await self.conn.execute(stmt) assert result.rowcount == 1 - async def sandbox_is_assigned(self, se_name: str, pfn: str) -> bool: + async def sandbox_is_assigned(self, pfn: str, se_name: str) -> bool: """Checks if a sandbox exists and has been assigned.""" stmt: sqlalchemy.Executable = sqlalchemy.select(sb_SandBoxes.Assigned).where( sb_SandBoxes.SEName == se_name, sb_SandBoxes.SEPFN == pfn @@ -84,13 +85,85 @@ async def sandbox_is_assigned(self, se_name: str, pfn: str) -> bool: result = await self.conn.execute(stmt) is_assigned = result.scalar_one() return is_assigned - return True - - async def unassign_sandbox_from_jobs(self, job_ids: list[int]): - """ - Unassign sandbox from jobs - """ - stmt = delete(sb_EntityMapping).where( - sb_EntityMapping.EntityId.in_(f"Job:{job_id}" for job_id in job_ids) + + @staticmethod + def jobid_to_entity_id(job_id: int) -> str: + """Define the entity id as 'Entity:entity_id' due to the DB definition""" + return f"Job:{job_id}" + + async def get_sandbox_assigned_to_job( + self, job_id: int, sb_type: SandboxType + ) -> list[Any]: + 
"""Get the sandbox assign to job""" + entity_id = self.jobid_to_entity_id(job_id) + stmt = ( + sqlalchemy.select(sb_SandBoxes.SEPFN) + .where(sb_SandBoxes.SBId == sb_EntityMapping.SBId) + .where( + sb_EntityMapping.EntityId == entity_id, + sb_EntityMapping.Type == sb_type, + ) ) - await self.conn.execute(stmt) + result = await self.conn.execute(stmt) + return [result.scalar()] + + async def assign_sandbox_to_jobs( + self, + jobs_ids: list[int], + pfn: str, + sb_type: SandboxType, + se_name: str, + ) -> None: + """Mapp sandbox and jobs""" + for job_id in jobs_ids: + # Define the entity id as 'Entity:entity_id' due to the DB definition: + entity_id = self.jobid_to_entity_id(job_id) + select_sb_id = sqlalchemy.select( + sb_SandBoxes.SBId, + sqlalchemy.literal(entity_id).label("EntityId"), + sqlalchemy.literal(sb_type).label("Type"), + ).where( + sb_SandBoxes.SEName == se_name, + sb_SandBoxes.SEPFN == pfn, + ) + stmt = sqlalchemy.insert(sb_EntityMapping).from_select( + ["SBId", "EntityId", "Type"], select_sb_id + ) + await self.conn.execute(stmt) + + stmt = ( + sqlalchemy.update(sb_SandBoxes) + .where(sb_SandBoxes.SEPFN == pfn) + .values(Assigned=True) + ) + result = await self.conn.execute(stmt) + assert result.rowcount == 1 + + async def unassign_sandboxes_to_jobs(self, jobs_ids: list[int]) -> None: + """Delete mapping between jobs and sandboxes""" + for job_id in jobs_ids: + entity_id = self.jobid_to_entity_id(job_id) + sb_sel_stmt = sqlalchemy.select( + sb_SandBoxes.SBId, + ).where(sb_EntityMapping.EntityId == entity_id) + + result = await self.conn.execute(sb_sel_stmt) + sb_ids = [row.SBId for row in result] + + del_stmt = sqlalchemy.delete(sb_EntityMapping).where( + sb_EntityMapping.EntityId == entity_id + ) + await self.conn.execute(del_stmt) + + sb_entity_sel_stmt = sqlalchemy.select(sb_EntityMapping.SBId).where( + sb_EntityMapping.SBId.in_(sb_ids) + ) + result = await self.conn.execute(sb_entity_sel_stmt) + remaining_sb_ids = [row.SBId for row in result] + if not remaining_sb_ids: + unassign_stmt = ( + sqlalchemy.update(sb_SandBoxes) + .where(sb_SandBoxes.SBId.in_(sb_ids)) + .values(Assigned=False) + ) + await self.conn.execute(unassign_stmt) diff --git a/diracx-db/tests/test_sandbox_metadata.py b/diracx-db/tests/test_sandbox_metadata.py index 9d19f73c..5a43d8b9 100644 --- a/diracx-db/tests/test_sandbox_metadata.py +++ b/diracx-db/tests/test_sandbox_metadata.py @@ -9,7 +9,7 @@ from diracx.core.models import SandboxInfo, UserInfo from diracx.db.sql.sandbox_metadata.db import SandboxMetadataDB -from diracx.db.sql.sandbox_metadata.schema import sb_SandBoxes +from diracx.db.sql.sandbox_metadata.schema import sb_EntityMapping, sb_SandBoxes @pytest.fixture @@ -46,7 +46,7 @@ async def test_insert_sandbox(sandbox_metadata_db: SandboxMetadataDB): assert pfn1 not in db_contents async with sandbox_metadata_db: with pytest.raises(sqlalchemy.exc.NoResultFound): - await sandbox_metadata_db.sandbox_is_assigned("SandboxSE", pfn1) + await sandbox_metadata_db.sandbox_is_assigned(pfn1, "SandboxSE") # Insert the sandbox async with sandbox_metadata_db: @@ -65,7 +65,7 @@ async def test_insert_sandbox(sandbox_metadata_db: SandboxMetadataDB): # The sandbox still hasn't been assigned async with sandbox_metadata_db: - assert not await sandbox_metadata_db.sandbox_is_assigned("SandboxSE", pfn1) + assert not await sandbox_metadata_db.sandbox_is_assigned(pfn1, "SandboxSE") # Inserting again should update the last access time await asyncio.sleep(1) # The timestamp only has second precision @@ -90,3 +90,84 @@ async 
def _dump_db( ) res = await sandbox_metadata_db.conn.execute(stmt) return {row.SEPFN: (row.OwnerId, row.LastAccessTime) for row in res} + + +async def test_assign_and_unsassign_sandbox_to_jobs( + sandbox_metadata_db: SandboxMetadataDB, +): + pfn = secrets.token_hex() + user_info = UserInfo( + sub="vo:sub", preferred_username="user1", dirac_group="group1", vo="vo" + ) + dummy_jobid = 666 + sandbox_se = "SandboxSE" + # Insert the sandbox + async with sandbox_metadata_db: + await sandbox_metadata_db.insert_sandbox(sandbox_se, user_info, pfn, 100) + + async with sandbox_metadata_db: + stmt = sqlalchemy.select(sb_SandBoxes.SBId, sb_SandBoxes.SEPFN) + res = await sandbox_metadata_db.conn.execute(stmt) + db_contents = {row.SEPFN: row.SBId for row in res} + sb_id_1 = db_contents[pfn] + # The sandbox still hasn't been assigned + async with sandbox_metadata_db: + assert not await sandbox_metadata_db.sandbox_is_assigned(pfn, sandbox_se) + + # Check there is no mapping + async with sandbox_metadata_db: + stmt = sqlalchemy.select( + sb_EntityMapping.SBId, sb_EntityMapping.EntityId, sb_EntityMapping.Type + ) + res = await sandbox_metadata_db.conn.execute(stmt) + db_contents = {row.SBId: (row.EntityId, row.Type) for row in res} + assert db_contents == {} + + # Assign sandbox with dummy jobid + async with sandbox_metadata_db: + await sandbox_metadata_db.assign_sandbox_to_jobs( + jobs_ids=[dummy_jobid], pfn=pfn, sb_type="Output", se_name=sandbox_se + ) + # Check if sandbox and job are mapped + async with sandbox_metadata_db: + stmt = sqlalchemy.select( + sb_EntityMapping.SBId, sb_EntityMapping.EntityId, sb_EntityMapping.Type + ) + res = await sandbox_metadata_db.conn.execute(stmt) + db_contents = {row.SBId: (row.EntityId, row.Type) for row in res} + + entity_id_1, sb_type = db_contents[sb_id_1] + assert entity_id_1 == f"Job:{dummy_jobid}" + assert sb_type == "Output" + + async with sandbox_metadata_db: + stmt = sqlalchemy.select(sb_SandBoxes.SBId, sb_SandBoxes.SEPFN) + res = await sandbox_metadata_db.conn.execute(stmt) + db_contents = {row.SEPFN: row.SBId for row in res} + sb_id_1 = db_contents[pfn] + # The sandbox should be assigned + async with sandbox_metadata_db: + assert await sandbox_metadata_db.sandbox_is_assigned(pfn, sandbox_se) + + # Unassign the sandbox to job + async with sandbox_metadata_db: + await sandbox_metadata_db.unassign_sandboxes_to_jobs([dummy_jobid]) + + # Entity should not exists anymore + async with sandbox_metadata_db: + stmt = sqlalchemy.select(sb_EntityMapping.SBId).where( + sb_EntityMapping.EntityId == entity_id_1 + ) + res = await sandbox_metadata_db.conn.execute(stmt) + entity_sb_id = [row.SBId for row in res] + assert entity_sb_id == [] + + # Should not be assigned anymore + async with sandbox_metadata_db: + assert await sandbox_metadata_db.sandbox_is_assigned(pfn, sandbox_se) is False + # Check the mapping has been deleted + async with sandbox_metadata_db: + stmt = sqlalchemy.select(sb_EntityMapping.SBId) + res = await sandbox_metadata_db.conn.execute(stmt) + res_sb_id = [row.SBId for row in res] + assert sb_id_1 not in res_sb_id diff --git a/diracx-routers/src/diracx/routers/job_manager/sandboxes.py b/diracx-routers/src/diracx/routers/job_manager/sandboxes.py index a9ff1b14..6e1c837e 100644 --- a/diracx-routers/src/diracx/routers/job_manager/sandboxes.py +++ b/diracx-routers/src/diracx/routers/job_manager/sandboxes.py @@ -2,17 +2,19 @@ import contextlib from http import HTTPStatus -from typing import TYPE_CHECKING, Annotated, AsyncIterator +from typing import 
TYPE_CHECKING, Annotated, AsyncIterator, Literal
 
 from aiobotocore.session import get_session
 from botocore.config import Config
 from botocore.errorfactory import ClientError
-from fastapi import Depends, HTTPException, Query
+from fastapi import Body, Depends, HTTPException, Query
 from pydantic import BaseModel, PrivateAttr
+from typing import Any
 from sqlalchemy.exc import NoResultFound
 
 from diracx.core.models import (
     SandboxInfo,
+    SandboxType,
 )
 from diracx.core.properties import JOB_ADMINISTRATOR, NORMAL_USER
 from diracx.core.s3 import (
@@ -104,7 +106,7 @@ async def initiate_sandbox_upload(
 
     try:
         exists_and_assigned = await sandbox_metadata_db.sandbox_is_assigned(
-            settings.se_name, pfn
+            pfn, settings.se_name
         )
     except NoResultFound:
         # The sandbox doesn't exist in the database
@@ -194,3 +196,72 @@ async def get_sandbox_file(
     return SandboxDownloadResponse(
         url=presigned_url, expires_in=settings.url_validity_seconds
     )
+
+
+@router.get("/{job_id}/sandbox")
+async def get_job_sandboxes(
+    job_id: int,
+    sandbox_metadata_db: SandboxMetadataDB,
+) -> dict[str, list[Any]]:
+    """Get input and output sandboxes of given job"""
+    # TODO: check that user has created the job or is admin
+    input_sb = await sandbox_metadata_db.get_sandbox_assigned_to_job(
+        job_id, SandboxType.Input
+    )
+    output_sb = await sandbox_metadata_db.get_sandbox_assigned_to_job(
+        job_id, SandboxType.Output
+    )
+    return {SandboxType.Input: input_sb, SandboxType.Output: output_sb}
+
+
+@router.get("/{job_id}/sandbox/{sandbox_type}")
+async def get_job_sandbox(
+    job_id: int,
+    sandbox_metadata_db: SandboxMetadataDB,
+    sandbox_type: Literal["input", "output"],
+) -> list[Any]:
+    """Get input or output sandbox of given job"""
+    # TODO: check that user has created the job or is admin
+    job_sb_pfns = await sandbox_metadata_db.get_sandbox_assigned_to_job(
+        job_id, SandboxType(sandbox_type.capitalize())
+    )
+
+    return job_sb_pfns
+
+
+@router.patch("/{job_id}/sandbox/output")
+async def assign_sandbox_to_job(
+    job_id: int,
+    pfn: Annotated[str, Body(max_length=256, pattern=SANDBOX_PFN_REGEX)],
+    sandbox_metadata_db: SandboxMetadataDB,
+    settings: SandboxStoreSettings,
+):
+    """Map the pfn as output sandbox to job"""
+    # TODO: check that user has created the job or is admin
+    short_pfn = pfn.split("|", 1)[-1]
+    await sandbox_metadata_db.assign_sandbox_to_jobs(
+        jobs_ids=[job_id],
+        pfn=short_pfn,
+        sb_type=SandboxType.Output,
+        se_name=settings.se_name,
+    )
+
+
+@router.delete("/{job_id}/sandbox")
+async def unassign_job_sandboxes(
+    job_id: int,
+    sandbox_metadata_db: SandboxMetadataDB,
+):
+    """Delete single job sandbox mapping"""
+    # TODO: check that user has created the job or is admin
+    await sandbox_metadata_db.unassign_sandboxes_to_jobs([job_id])
+
+
+@router.delete("/sandbox")
+async def unassign_bulk_jobs_sandboxes(
+    jobs_ids: Annotated[list[int], Query()],
+    sandbox_metadata_db: SandboxMetadataDB,
+):
+    """Delete bulk jobs sandbox mapping"""
+    # TODO: check that user has created the job or is admin
+    await sandbox_metadata_db.unassign_sandboxes_to_jobs(jobs_ids)
diff --git a/diracx-routers/tests/jobs/test_sandboxes.py b/diracx-routers/tests/jobs/test_sandboxes.py
index c012b87c..ca8a74bd 100644
--- a/diracx-routers/tests/jobs/test_sandboxes.py
+++ b/diracx-routers/tests/jobs/test_sandboxes.py
@@ -13,7 +13,13 @@
 from diracx.routers.auth.utils import AuthSettings
 
 pytestmark = pytest.mark.enabled_dependencies(
-    ["AuthSettings", "SandboxMetadataDB", "SandboxStoreSettings"]
+    [
+        "AuthSettings",
+        "JobDB",
+        
"JobLoggingDB", + "SandboxMetadataDB", + "SandboxStoreSettings", + ] ) @@ -92,3 +98,87 @@ def test_upload_oversized(normal_user_client: TestClient): ) assert r.status_code == 400, r.text assert "Sandbox too large" in r.json()["detail"], r.text + + +TEST_JDL = """ + Arguments = "jobDescription.xml -o LogLevel=INFO"; + Executable = "dirac-jobexec"; + JobGroup = jobGroup; + JobName = jobName; + JobType = User; + LogLevel = INFO; + OutputSandbox = + { + Script1_CodeOutput.log, + std.err, + std.out + }; + Priority = 1; + Site = ANY; + StdError = std.err; + StdOutput = std.out; +""" + + +def test_assign_then_unassign_sandboxes_to_jobs(normal_user_client: TestClient): + data = secrets.token_bytes(512) + checksum = hashlib.sha256(data).hexdigest() + + # Upload Sandbox: + r = normal_user_client.post( + "/api/jobs/sandbox", + json={ + "checksum_algorithm": "sha256", + "checksum": checksum, + "size": len(data), + "format": "tar.bz2", + }, + ) + assert r.status_code == 200, r.text + upload_info = r.json() + assert upload_info["url"] + sandbox_pfn = upload_info["pfn"] + assert sandbox_pfn.startswith("SB:SandboxSE|/S3/") + + # Submit a job: + job_definitions = [TEST_JDL] + r = normal_user_client.post("/api/jobs/", json=job_definitions) + assert r.status_code == 200, r.json() + assert len(r.json()) == len(job_definitions) + job_id = r.json()[0]["JobID"] + + # Getting job sb: + r = normal_user_client.get(f"/api/jobs/{job_id}/sandbox/output") + assert r.status_code == 200 + # Should be empty + assert r.json()[0] is None + + # Assign sb to job: + r = normal_user_client.patch( + f"/api/jobs/{job_id}/sandbox/output", + json=sandbox_pfn, + ) + assert r.status_code == 200 + + # Get the sb again: + short_pfn = sandbox_pfn.split("|", 1)[-1] + r = normal_user_client.get(f"/api/jobs/{job_id}/sandbox") + assert r.status_code == 200 + assert r.json()["Input"] == [None] + assert r.json()["Output"] == [short_pfn] + + r = normal_user_client.get(f"/api/jobs/{job_id}/sandbox/output") + assert r.status_code == 200 + assert r.json()[0] == short_pfn + + # Unassign sb to job: + job_ids = [job_id] + r = normal_user_client.delete("/api/jobs/sandbox", params={"jobs_ids": job_ids}) + assert r.status_code == 200 + + # Get the sb again, it should'nt be there anymore: + short_pfn = sandbox_pfn.split("|", 1)[-1] + r = normal_user_client.get(f"/api/jobs/{job_id}/sandbox") + assert r.status_code == 200 + assert r.json()["Input"] == [None] + assert r.json()["Output"] == [None] From e43ded90661633b76f7023c94e319815a467fc17 Mon Sep 17 00:00:00 2001 From: natthan-pigoux Date: Tue, 9 Apr 2024 15:50:56 +0200 Subject: [PATCH 2/2] Regenerate client --- diracx-client/src/diracx/client/__init__.py | 2 +- diracx-client/src/diracx/client/_client.py | 2 +- .../src/diracx/client/_configuration.py | 2 +- diracx-client/src/diracx/client/_vendor.py | 2 +- .../src/diracx/client/aio/__init__.py | 2 +- .../src/diracx/client/aio/_client.py | 2 +- .../src/diracx/client/aio/_configuration.py | 2 +- .../src/diracx/client/aio/_vendor.py | 2 +- .../diracx/client/aio/operations/__init__.py | 2 +- .../client/aio/operations/_operations.py | 406 ++++++++++++-- .../src/diracx/client/models/__init__.py | 8 +- .../src/diracx/client/models/_enums.py | 19 +- .../src/diracx/client/models/_models.py | 2 +- .../src/diracx/client/operations/__init__.py | 2 +- .../diracx/client/operations/_operations.py | 513 ++++++++++++++++-- 15 files changed, 868 insertions(+), 100 deletions(-) diff --git a/diracx-client/src/diracx/client/__init__.py 
b/diracx-client/src/diracx/client/__init__.py index b6f57093..6c158e3c 100644 --- a/diracx-client/src/diracx/client/__init__.py +++ b/diracx-client/src/diracx/client/__init__.py @@ -1,6 +1,6 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.2) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/_client.py b/diracx-client/src/diracx/client/_client.py index 4f137096..33abe916 100644 --- a/diracx-client/src/diracx/client/_client.py +++ b/diracx-client/src/diracx/client/_client.py @@ -1,6 +1,6 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.2) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/_configuration.py b/diracx-client/src/diracx/client/_configuration.py index 13efb6a0..8d6c22c4 100644 --- a/diracx-client/src/diracx/client/_configuration.py +++ b/diracx-client/src/diracx/client/_configuration.py @@ -1,6 +1,6 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.2) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/_vendor.py b/diracx-client/src/diracx/client/_vendor.py index 01633240..09887722 100644 --- a/diracx-client/src/diracx/client/_vendor.py +++ b/diracx-client/src/diracx/client/_vendor.py @@ -1,5 +1,5 @@ # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.2) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/aio/__init__.py b/diracx-client/src/diracx/client/aio/__init__.py index b6f57093..6c158e3c 100644 --- a/diracx-client/src/diracx/client/aio/__init__.py +++ b/diracx-client/src/diracx/client/aio/__init__.py @@ -1,6 +1,6 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.2) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/aio/_client.py b/diracx-client/src/diracx/client/aio/_client.py index a52ed049..79235982 100644 --- a/diracx-client/src/diracx/client/aio/_client.py +++ b/diracx-client/src/diracx/client/aio/_client.py @@ -1,6 +1,6 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.2) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/aio/_configuration.py b/diracx-client/src/diracx/client/aio/_configuration.py index 1dc173e7..c8ab98a6 100644 --- a/diracx-client/src/diracx/client/aio/_configuration.py +++ b/diracx-client/src/diracx/client/aio/_configuration.py @@ -1,6 +1,6 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.2) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/aio/_vendor.py b/diracx-client/src/diracx/client/aio/_vendor.py index 01633240..09887722 100644 --- a/diracx-client/src/diracx/client/aio/_vendor.py +++ b/diracx-client/src/diracx/client/aio/_vendor.py @@ -1,5 +1,5 @@ # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.2) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/aio/operations/__init__.py b/diracx-client/src/diracx/client/aio/operations/__init__.py index 7ef1855d..9ad57998 100644 --- a/diracx-client/src/diracx/client/aio/operations/__init__.py +++ b/diracx-client/src/diracx/client/aio/operations/__init__.py @@ -1,6 +1,6 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.2) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/aio/operations/_operations.py b/diracx-client/src/diracx/client/aio/operations/_operations.py index c392b629..26ccb985 100644 --- a/diracx-client/src/diracx/client/aio/operations/_operations.py +++ b/diracx-client/src/diracx/client/aio/operations/_operations.py @@ -1,12 +1,23 @@ # pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.2) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, overload +from typing import ( + Any, + Callable, + Dict, + IO, + List, + Optional, + Type, + TypeVar, + Union, + overload, +) from azure.core import MatchConditions from azure.core.exceptions import ( @@ -35,8 +46,11 @@ build_auth_revoke_refresh_token_request, build_auth_userinfo_request, build_config_serve_config_request, + build_jobs_assign_sandbox_to_job_request, build_jobs_delete_bulk_jobs_request, build_jobs_delete_single_job_request, + build_jobs_get_job_sandbox_request, + build_jobs_get_job_sandboxes_request, build_jobs_get_job_status_bulk_request, build_jobs_get_job_status_history_bulk_request, build_jobs_get_sandbox_file_request, @@ -56,6 +70,8 @@ build_jobs_set_single_job_status_request, build_jobs_submit_bulk_jobs_request, build_jobs_summary_request, + build_jobs_unassign_bulk_jobs_sandboxes_request, + build_jobs_unassign_job_sandboxes_request, build_well_known_installation_metadata_request, build_well_known_openid_configuration_request, ) @@ -97,13 +113,13 @@ def __init__(self, *args, **kwargs) -> None: async def openid_configuration(self, **kwargs: Any) -> Any: """Openid Configuration. - Openid Configuration. + OpenID Connect discovery endpoint. :return: any :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -150,13 +166,13 @@ async def openid_configuration(self, **kwargs: Any) -> Any: async def installation_metadata(self, **kwargs: Any) -> _models.Metadata: """Installation Metadata. - Installation Metadata. + Get metadata about the dirac installation. 
:return: Metadata :rtype: ~client.models.Metadata :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -247,7 +263,7 @@ async def do_device_flow(self, *, user_code: str, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -313,7 +329,7 @@ async def initiate_device_flow( :rtype: ~client.models.InitiateDeviceFlowResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -378,7 +394,7 @@ async def finish_device_flow(self, *, code: str, state: str, **kwargs: Any) -> A :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -427,13 +443,13 @@ async def finish_device_flow(self, *, code: str, state: str, **kwargs: Any) -> A async def finished(self, **kwargs: Any) -> Any: """Finished. - Finished. + This is the final step of the device flow. :return: any :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -487,7 +503,7 @@ async def get_refresh_tokens(self, **kwargs: Any) -> List[Any]: :rtype: list[any] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -543,7 +559,7 @@ async def revoke_refresh_token(self, jti: str, **kwargs: Any) -> str: :rtype: str :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -597,7 +613,7 @@ async def userinfo(self, **kwargs: Any) -> _models.UserInfoResponse: :rtype: ~client.models.UserInfoResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -655,7 +671,13 @@ async def authorization_flow( ) -> Any: """Authorization Flow. - Authorization Flow. + Initiate the authorization flow. + It will redirect to the actual OpenID server (IAM, CheckIn) to + perform a authorization code flow. + + We set the user details obtained from the user authorize flow in a cookie + to be able to map the authorization flow with the corresponding + user authorize flow. :keyword response_type: "code" Required. 
:paramtype response_type: str or ~client.models.Enum0 @@ -675,7 +697,7 @@ async def authorization_flow( :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -731,7 +753,13 @@ async def authorization_flow_complete( ) -> Any: """Authorization Flow Complete. - Authorization Flow Complete. + Complete the authorization flow. + + The user is redirected back to the DIRAC auth service after completing the IAM's authorization + flow. + We retrieve the original flow details from the decrypted state and store the ID token requested + from the IAM. + The user is then redirected to the client's redirect URI. :keyword code: Required. :paramtype code: str @@ -741,7 +769,7 @@ async def authorization_flow_complete( :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -837,7 +865,7 @@ async def serve_config( :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -931,7 +959,7 @@ async def get_sandbox_file( :rtype: ~client.models.SandboxDownloadResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1047,7 +1075,7 @@ async def initiate_sandbox_upload( :rtype: ~client.models.SandboxUploadResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1104,6 +1132,304 @@ async def initiate_sandbox_upload( return deserialized # type: ignore + @distributed_trace_async + async def unassign_bulk_jobs_sandboxes( + self, *, jobs_ids: List[int], **kwargs: Any + ) -> Any: + """Unassign Bulk Jobs Sandboxes. + + Delete bulk jobs sandbox mapping. + + :keyword jobs_ids: Required. 
+ :paramtype jobs_ids: list[int] + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_unassign_bulk_jobs_sandboxes_request( + jobs_ids=jobs_ids, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + await response.read() # Load the body in memory and close the socket + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_job_sandboxes( + self, job_id: int, **kwargs: Any + ) -> Dict[str, List[Any]]: + """Get Job Sandboxes. + + Get input and output sandboxes of given job. + + :param job_id: Required. + :type job_id: int + :return: dict mapping str to list of any + :rtype: dict[str, list[any]] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Dict[str, List[Any]]] = kwargs.pop("cls", None) + + _request = build_jobs_get_job_sandboxes_request( + job_id=job_id, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + await response.read() # Load the body in memory and close the socket + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("{[object]}", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def unassign_job_sandboxes(self, job_id: int, **kwargs: Any) -> Any: + """Unassign Job Sandboxes. + + Delete single job sandbox mapping. + + :param job_id: Required. 
+ :type job_id: int + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_unassign_job_sandboxes_request( + job_id=job_id, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + await response.read() # Load the body in memory and close the socket + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_job_sandbox( + self, job_id: int, sandbox_type: Union[str, _models.SandboxType], **kwargs: Any + ) -> List[Any]: + """Get Job Sandbox. + + Get input or output sandbox of given job. + + :param job_id: Required. + :type job_id: int + :param sandbox_type: Known values are: "input" and "output". Required. + :type sandbox_type: str or ~client.models.SandboxType + :return: list of any + :rtype: list[any] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[Any]] = kwargs.pop("cls", None) + + _request = build_jobs_get_job_sandbox_request( + job_id=job_id, + sandbox_type=sandbox_type, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + await response.read() # Load the body in memory and close the socket + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("[object]", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def assign_sandbox_to_job(self, job_id: int, body: str, **kwargs: Any) -> Any: + """Assign Sandbox To Job. + + Mapp the pfn as output sandbox to job. + + :param job_id: Required. + :type job_id: int + :param body: Required. 
+ :type body: str + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("Content-Type", "application/json") + ) + cls: ClsType[Any] = kwargs.pop("cls", None) + + _content = self._serialize.body(body, "str") + + _request = build_jobs_assign_sandbox_to_job_request( + job_id=job_id, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + await response.read() # Load the body in memory and close the socket + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + @overload async def submit_bulk_jobs( self, body: List[str], *, content_type: str = "application/json", **kwargs: Any @@ -1154,7 +1480,7 @@ async def submit_bulk_jobs( :rtype: list[~client.models.InsertedJob] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1223,7 +1549,7 @@ async def delete_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1279,7 +1605,7 @@ async def kill_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1339,7 +1665,7 @@ async def remove_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1397,7 +1723,7 @@ async def get_job_status_bulk( :rtype: dict[str, ~client.models.LimitedJobStatusReturn] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1513,7 +1839,7 @@ async def set_job_status_bulk( :rtype: dict[str, ~client.models.SetJobStatusReturn] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: 
ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1585,7 +1911,7 @@ async def get_job_status_history_bulk( :rtype: dict[str, list[~client.models.JobStatusReturn]] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1641,7 +1967,7 @@ async def reschedule_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> An :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1697,7 +2023,7 @@ async def reschedule_single_job(self, job_id: int, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1826,7 +2152,7 @@ async def search( :rtype: list[JSON] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1942,7 +2268,7 @@ async def summary( :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2011,7 +2337,7 @@ async def get_single_job(self, job_id: int, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2067,7 +2393,7 @@ async def delete_single_job(self, job_id: int, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2129,7 +2455,7 @@ async def set_single_job_properties( :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2193,7 +2519,7 @@ async def kill_single_job(self, job_id: int, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2253,7 +2579,7 @@ async def remove_single_job(self, job_id: int, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2311,7 +2637,7 @@ async def get_single_job_status( :rtype: dict[str, ~client.models.LimitedJobStatusReturn] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2436,7 +2762,7 @@ async 
def set_single_job_status( :rtype: dict[str, ~client.models.SetJobStatusReturn] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2509,7 +2835,7 @@ async def get_single_job_status_history( :rtype: dict[str, list[~client.models.JobStatusReturn]] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, diff --git a/diracx-client/src/diracx/client/models/__init__.py b/diracx-client/src/diracx/client/models/__init__.py index 7ec8e05c..336cfe9d 100644 --- a/diracx-client/src/diracx/client/models/__init__.py +++ b/diracx-client/src/diracx/client/models/__init__.py @@ -1,6 +1,6 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.2) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -38,13 +38,14 @@ from ._enums import ChecksumAlgorithm from ._enums import Enum0 from ._enums import Enum1 -from ._enums import Enum10 from ._enums import Enum11 +from ._enums import Enum12 from ._enums import Enum2 from ._enums import Enum3 from ._enums import Enum4 from ._enums import JobStatus from ._enums import SandboxFormat +from ._enums import SandboxType from ._enums import ScalarSearchOperator from ._enums import VectorSearchOperator from ._patch import __all__ as _patch_all @@ -85,13 +86,14 @@ "ChecksumAlgorithm", "Enum0", "Enum1", - "Enum10", "Enum11", + "Enum12", "Enum2", "Enum3", "Enum4", "JobStatus", "SandboxFormat", + "SandboxType", "ScalarSearchOperator", "VectorSearchOperator", ] diff --git a/diracx-client/src/diracx/client/models/_enums.py b/diracx-client/src/diracx/client/models/_enums.py index 3c804657..d448e10b 100644 --- a/diracx-client/src/diracx/client/models/_enums.py +++ b/diracx-client/src/diracx/client/models/_enums.py @@ -1,6 +1,6 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.2) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -26,14 +26,14 @@ class Enum1(str, Enum, metaclass=CaseInsensitiveEnumMeta): S256 = "S256" -class Enum10(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Enum10.""" +class Enum11(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum11.""" ASC = "asc" -class Enum11(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Enum11.""" +class Enum12(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum12.""" DSC = "dsc" @@ -84,6 +84,13 @@ class SandboxFormat(str, Enum, metaclass=CaseInsensitiveEnumMeta): TAR_BZ2 = "tar.bz2" +class SandboxType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Sandbox Type.""" + + INPUT = "input" + OUTPUT = "output" + + class ScalarSearchOperator(str, Enum, metaclass=CaseInsensitiveEnumMeta): """An enumeration.""" @@ -97,5 +104,5 @@ class ScalarSearchOperator(str, Enum, metaclass=CaseInsensitiveEnumMeta): class VectorSearchOperator(str, Enum, metaclass=CaseInsensitiveEnumMeta): """An enumeration.""" - IN = "in" + IN_ENUM = "in" NOT_IN = "not in" diff --git a/diracx-client/src/diracx/client/models/_models.py b/diracx-client/src/diracx/client/models/_models.py index c693fe19..3b470cee 100644 --- a/diracx-client/src/diracx/client/models/_models.py +++ b/diracx-client/src/diracx/client/models/_models.py @@ -1,7 +1,7 @@ # coding=utf-8 # pylint: disable=too-many-lines # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.2) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/operations/__init__.py b/diracx-client/src/diracx/client/operations/__init__.py index 7ef1855d..9ad57998 100644 --- a/diracx-client/src/diracx/client/operations/__init__.py +++ b/diracx-client/src/diracx/client/operations/__init__.py @@ -1,6 +1,6 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.2) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/operations/_operations.py b/diracx-client/src/diracx/client/operations/_operations.py index 953950f3..7d1984eb 100644 --- a/diracx-client/src/diracx/client/operations/_operations.py +++ b/diracx-client/src/diracx/client/operations/_operations.py @@ -1,12 +1,23 @@ # pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.2) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, overload +from typing import ( + Any, + Callable, + Dict, + IO, + List, + Optional, + Type, + TypeVar, + Union, + overload, +) from azure.core import MatchConditions from azure.core.exceptions import ( @@ -344,6 +355,120 @@ def build_jobs_initiate_sandbox_upload_request( return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) +def build_jobs_unassign_bulk_jobs_sandboxes_request( # pylint: disable=name-too-long + *, jobs_ids: List[int], **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/sandbox" + + # Construct parameters + _params["jobs_ids"] = _SERIALIZER.query("jobs_ids", jobs_ids, "[int]") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest( + method="DELETE", url=_url, params=_params, headers=_headers, **kwargs + ) + + +def build_jobs_get_job_sandboxes_request(job_id: int, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/{job_id}/sandbox" + path_format_arguments = { + "job_id": _SERIALIZER.url("job_id", job_id, "int"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + + +def build_jobs_unassign_job_sandboxes_request( # pylint: disable=name-too-long + job_id: int, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/{job_id}/sandbox" + path_format_arguments = { + "job_id": _SERIALIZER.url("job_id", job_id, "int"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, headers=_headers, **kwargs) + + +def build_jobs_get_job_sandbox_request( + job_id: int, sandbox_type: Union[str, _models.SandboxType], **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/{job_id}/sandbox/{sandbox_type}" + path_format_arguments = { + "job_id": _SERIALIZER.url("job_id", job_id, "int"), + "sandbox_type": _SERIALIZER.url("sandbox_type", sandbox_type, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + + +def build_jobs_assign_sandbox_to_job_request( + job_id: int, *, content: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + content_type: Optional[str] = kwargs.pop( + "content_type", _headers.pop("Content-Type", None) + ) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = 
"/api/jobs/{job_id}/sandbox/output" + path_format_arguments = { + "job_id": _SERIALIZER.url("job_id", job_id, "int"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header( + "content_type", content_type, "str" + ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest( + method="PATCH", url=_url, headers=_headers, content=content, **kwargs + ) + + def build_jobs_submit_bulk_jobs_request(**kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) @@ -812,13 +937,13 @@ def __init__(self, *args, **kwargs): def openid_configuration(self, **kwargs: Any) -> Any: """Openid Configuration. - Openid Configuration. + OpenID Connect discovery endpoint. :return: any :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -865,13 +990,13 @@ def openid_configuration(self, **kwargs: Any) -> Any: def installation_metadata(self, **kwargs: Any) -> _models.Metadata: """Installation Metadata. - Installation Metadata. + Get metadata about the dirac installation. :return: Metadata :rtype: ~client.models.Metadata :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -962,7 +1087,7 @@ def do_device_flow(self, *, user_code: str, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1028,7 +1153,7 @@ def initiate_device_flow( :rtype: ~client.models.InitiateDeviceFlowResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1093,7 +1218,7 @@ def finish_device_flow(self, *, code: str, state: str, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1142,13 +1267,13 @@ def finish_device_flow(self, *, code: str, state: str, **kwargs: Any) -> Any: def finished(self, **kwargs: Any) -> Any: """Finished. - Finished. + This is the final step of the device flow. 
:return: any :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1202,7 +1327,7 @@ def get_refresh_tokens(self, **kwargs: Any) -> List[Any]: :rtype: list[any] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1258,7 +1383,7 @@ def revoke_refresh_token(self, jti: str, **kwargs: Any) -> str: :rtype: str :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1312,7 +1437,7 @@ def userinfo(self, **kwargs: Any) -> _models.UserInfoResponse: :rtype: ~client.models.UserInfoResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1370,7 +1495,13 @@ def authorization_flow( ) -> Any: """Authorization Flow. - Authorization Flow. + Initiate the authorization flow. + It will redirect to the actual OpenID server (IAM, CheckIn) to + perform a authorization code flow. + + We set the user details obtained from the user authorize flow in a cookie + to be able to map the authorization flow with the corresponding + user authorize flow. :keyword response_type: "code" Required. :paramtype response_type: str or ~client.models.Enum0 @@ -1390,7 +1521,7 @@ def authorization_flow( :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1446,7 +1577,13 @@ def authorization_flow_complete( ) -> Any: """Authorization Flow Complete. - Authorization Flow Complete. + Complete the authorization flow. + + The user is redirected back to the DIRAC auth service after completing the IAM's authorization + flow. + We retrieve the original flow details from the decrypted state and store the ID token requested + from the IAM. + The user is then redirected to the client's redirect URI. :keyword code: Required. 
:paramtype code: str @@ -1456,7 +1593,7 @@ def authorization_flow_complete( :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1552,7 +1689,7 @@ def serve_config( :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1646,7 +1783,7 @@ def get_sandbox_file( :rtype: ~client.models.SandboxDownloadResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1762,7 +1899,7 @@ def initiate_sandbox_upload( :rtype: ~client.models.SandboxUploadResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1819,6 +1956,302 @@ def initiate_sandbox_upload( return deserialized # type: ignore + @distributed_trace + def unassign_bulk_jobs_sandboxes( + self, *, jobs_ids: List[int], **kwargs: Any + ) -> Any: + """Unassign Bulk Jobs Sandboxes. + + Delete bulk jobs sandbox mapping. + + :keyword jobs_ids: Required. + :paramtype jobs_ids: list[int] + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_unassign_bulk_jobs_sandboxes_request( + jobs_ids=jobs_ids, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + response.read() # Load the body in memory and close the socket + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_job_sandboxes(self, job_id: int, **kwargs: Any) -> Dict[str, List[Any]]: + """Get Job Sandboxes. + + Get input and output sandboxes of given job. + + :param job_id: Required. 
+ :type job_id: int + :return: dict mapping str to list of any + :rtype: dict[str, list[any]] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Dict[str, List[Any]]] = kwargs.pop("cls", None) + + _request = build_jobs_get_job_sandboxes_request( + job_id=job_id, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + response.read() # Load the body in memory and close the socket + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("{[object]}", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def unassign_job_sandboxes(self, job_id: int, **kwargs: Any) -> Any: + """Unassign Job Sandboxes. + + Delete single job sandbox mapping. + + :param job_id: Required. + :type job_id: int + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_unassign_job_sandboxes_request( + job_id=job_id, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + response.read() # Load the body in memory and close the socket + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_job_sandbox( + self, job_id: int, sandbox_type: Union[str, _models.SandboxType], **kwargs: Any + ) -> List[Any]: + """Get Job Sandbox. + + Get input or output sandbox of given job. + + :param job_id: Required. + :type job_id: int + :param sandbox_type: Known values are: "input" and "output". Required. 
+ :type sandbox_type: str or ~client.models.SandboxType + :return: list of any + :rtype: list[any] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[Any]] = kwargs.pop("cls", None) + + _request = build_jobs_get_job_sandbox_request( + job_id=job_id, + sandbox_type=sandbox_type, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + response.read() # Load the body in memory and close the socket + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("[object]", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def assign_sandbox_to_job(self, job_id: int, body: str, **kwargs: Any) -> Any: + """Assign Sandbox To Job. + + Map the pfn as output sandbox to the job. + + :param job_id: Required. + :type job_id: int + :param body: Required. + :type body: str + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("Content-Type", "application/json") + ) + cls: ClsType[Any] = kwargs.pop("cls", None) + + _content = self._serialize.body(body, "str") + + _request = build_jobs_assign_sandbox_to_job_request( + job_id=job_id, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + response.read() # Load the body in memory and close the socket + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload def submit_bulk_jobs( self, body: List[str], *, content_type: str = "application/json", **kwargs: Any @@ -1869,7 +2302,7 @@ def submit_bulk_jobs( :rtype: list[~client.models.InsertedJob] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: 
ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1938,7 +2371,7 @@ def delete_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1994,7 +2427,7 @@ def kill_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2054,7 +2487,7 @@ def remove_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2112,7 +2545,7 @@ def get_job_status_bulk( :rtype: dict[str, ~client.models.LimitedJobStatusReturn] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2228,7 +2661,7 @@ def set_job_status_bulk( :rtype: dict[str, ~client.models.SetJobStatusReturn] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2300,7 +2733,7 @@ def get_job_status_history_bulk( :rtype: dict[str, list[~client.models.JobStatusReturn]] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2356,7 +2789,7 @@ def reschedule_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2412,7 +2845,7 @@ def reschedule_single_job(self, job_id: int, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2541,7 +2974,7 @@ def search( :rtype: list[JSON] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2657,7 +3090,7 @@ def summary( :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2726,7 +3159,7 @@ def get_single_job(self, job_id: int, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2782,7 +3215,7 @@ def 
delete_single_job(self, job_id: int, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2844,7 +3277,7 @@ def set_single_job_properties( :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2908,7 +3341,7 @@ def kill_single_job(self, job_id: int, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2968,7 +3401,7 @@ def remove_single_job(self, job_id: int, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -3026,7 +3459,7 @@ def get_single_job_status( :rtype: dict[str, ~client.models.LimitedJobStatusReturn] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -3151,7 +3584,7 @@ def set_single_job_status( :rtype: dict[str, ~client.models.SetJobStatusReturn] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -3224,7 +3657,7 @@ def get_single_job_status_history( :rtype: dict[str, list[~client.models.JobStatusReturn]] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError,